Commit 8d539e2

Merge branch 'Azure:master' into master

2 parents: a181f75 + c106ac2

File tree

83 files changed: +2,325 −4,699 lines


.github/workflows/runAsimSchemaAndDataTesters.yaml

Lines changed: 2 additions & 2 deletions
@@ -2,7 +2,7 @@
 # The script runs ASIM Schema and Data testers on the "eco-connector-test" workspace.
 name: Run ASIM tests on "ASIM-SchemaDataTester-GithubShared" workspace
 on:
-  pull_request_target:
+  pull_request:
     types: [opened, edited, reopened, synchronize, labeled]
     branches:
       - master
@@ -419,4 +419,4 @@ jobs:
         echo "Downloading script from the master: $url"
         curl -o "$filePath" "$url"
         # Execute the script
-        python "$filePath"
+        python "$filePath"

.script/bundleAwsS3Scripts.sh

Lines changed: 214 additions & 0 deletions
@@ -0,0 +1,214 @@
#!/bin/bash
# Script to bundle AWS-S3 DataConnector scripts into zip files
# This script creates the ConfigAwsS3DataConnectorScripts.zip and ConfigAwsS3DataConnectorScriptsGov.zip files
# It extracts existing zips and only replaces modified files to preserve unchanged content

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
AWS_S3_DIR="$REPO_ROOT/DataConnectors/AWS-S3"
TEMP_DIR=$(mktemp -d)

echo "Building AWS-S3 DataConnector script bundles..."
echo "Working directory: $TEMP_DIR"

cleanup() {
    echo "Cleaning up temporary directory..."
    rm -rf "$TEMP_DIR"
}
trap cleanup EXIT

cd "$AWS_S3_DIR"

# Create temporary directories for building zips
mkdir -p "$TEMP_DIR/com" "$TEMP_DIR/gov"

# Get list of changed files in the AWS-S3 directory from the last commit
get_changed_files() {
    local base_ref="${GITHUB_BASE_REF:-HEAD~1}" # Use GitHub base ref or previous commit
    local changed_files=()

    # Get all changed files in the AWS-S3 directory, excluding zip files
    while IFS= read -r file; do
        # Skip if no file (empty output from git diff)
        [[ -z "$file" ]] && continue

        # Skip zip files and BUNDLE_AUTOMATION.md (documentation only)
        [[ "$file" == *.zip ]] && continue
        [[ "$file" == *"BUNDLE_AUTOMATION.md" ]] && continue

        # Remove the DataConnectors/AWS-S3/ prefix to get relative path
        local relative_file="${file#DataConnectors/AWS-S3/}"
        if [[ "$relative_file" != "$file" ]] && [[ -n "$relative_file" ]]; then # File is in AWS-S3 directory and not empty
            changed_files+=("$relative_file")
        fi
    done < <(git diff --name-only "$base_ref" HEAD -- "DataConnectors/AWS-S3/" 2>/dev/null || true)

    # Only output if we have files
    if [[ ${#changed_files[@]} -gt 0 ]]; then
        printf '%s\n' "${changed_files[@]}"
    fi
}

# Replace the hardcoded FILES_TO_BUNDLE with dynamic detection
mapfile -t FILES_TO_BUNDLE < <(get_changed_files)

# Fallback: if no files changed, include all relevant files
if [[ ${#FILES_TO_BUNDLE[@]} -eq 0 ]]; then
    echo "No changes detected, including all files..."
    FILES_TO_BUNDLE=(
        "AwsRequiredPolicies.md"
        "AwsRequiredPoliciesForGov.md"
        "CloudFormation/cloudformationtemplateforAWSS3.txt"
        "ConfigAwsConnector.ps1"
        "ConfigCloudTrailDataConnector.ps1"
        "ConfigCloudWatchDataConnector.ps1"
        "ConfigCustomLogDataConnector.ps1"
        "ConfigGuardDutyDataConnector.ps1"
        "ConfigVpcFlowDataConnector.ps1"
        "ConfigVpcFlowLogs.ps1"
        "Enviornment/EnviornmentConstants.ps1"
        "README.md"
        "Utils/AwsPoliciesUpdate.ps1"
        "Utils/AwsResourceCreator.ps1"
        "Utils/AwsSentinelTag.ps1"
        "Utils/CommonAwsPolicies.ps1"
        "Utils/HelperFunctions.ps1"
    )
else
    echo "Detected ${#FILES_TO_BUNDLE[@]} changed file(s) to update in bundles:"
    printf ' - %s\n' "${FILES_TO_BUNDLE[@]}"
fi

# Function to extract existing zip if it exists, or create empty directory
extract_or_create() {
    local zip_path="$1"
    local extract_dir="$2"

    mkdir -p "$extract_dir"

    if [ -f "$zip_path" ]; then
        echo " Extracting existing $zip_path..."
        unzip -q "$zip_path" -d "$extract_dir" 2>/dev/null || true
    else
        echo " Creating new bundle (no existing zip found)..."
    fi
}

# Function to update files in directory (only replace if source exists and is different)
update_files() {
    local dest_dir="$1"
    shift
    local files=("$@")

    for file in "${files[@]}"; do
        if [ -f "$AWS_S3_DIR/$file" ]; then
            local dir_path=$(dirname "$dest_dir/$file")
            mkdir -p "$dir_path"
            # Only copy if file doesn't exist or is different
            if [ ! -f "$dest_dir/$file" ] || ! cmp -s "$AWS_S3_DIR/$file" "$dest_dir/$file"; then
                cp "$AWS_S3_DIR/$file" "$dest_dir/$file"
                echo " Updated: $file"
            fi
        else
            echo " Warning: File not found in source: $file"
        fi
    done
}

# Function to create a nested zip file
create_nested_zip() {
    local parent_zip="$1"
    local nested_zip_name="$2"
    local work_dir="$3"
    local lambda_version="$4" # "v1" or "v2"

    echo "Processing $nested_zip_name..."

    # Create temporary directory for this nested zip
    local nested_dir="$work_dir/${nested_zip_name%.zip}"

    # Extract existing nested zip from parent if it exists
    if [ -f "$AWS_S3_DIR/$parent_zip" ]; then
        local parent_extract="$work_dir/parent_extract"
        mkdir -p "$parent_extract"
        unzip -q "$AWS_S3_DIR/$parent_zip" -d "$parent_extract" 2>/dev/null || true

        if [ -f "$parent_extract/$nested_zip_name" ]; then
            extract_or_create "$parent_extract/$nested_zip_name" "$nested_dir"
        else
            mkdir -p "$nested_dir"
        fi
        rm -rf "$parent_extract"
    else
        mkdir -p "$nested_dir"
    fi

    # Update common files (only replace modified ones)
    update_files "$nested_dir" "${FILES_TO_BUNDLE[@]}"

    # Update appropriate Lambda function version
    if [ "$lambda_version" = "v2" ]; then
        if [ -f "$AWS_S3_DIR/CloudWatchLambdaFunction.py" ]; then
            if [ ! -f "$nested_dir/CloudWatchLambdaFunction.py" ] || ! cmp -s "$AWS_S3_DIR/CloudWatchLambdaFunction.py" "$nested_dir/CloudWatchLambdaFunction.py"; then
                cp "$AWS_S3_DIR/CloudWatchLambdaFunction.py" "$nested_dir/CloudWatchLambdaFunction.py"
                echo " Updated: CloudWatchLambdaFunction.py"
            fi
        fi
        if [ -f "$AWS_S3_DIR/CloudWatchLambdaFunction_V2.py" ]; then
            if [ ! -f "$nested_dir/CloudWatchLambdaFunction_V2.py" ] || ! cmp -s "$AWS_S3_DIR/CloudWatchLambdaFunction_V2.py" "$nested_dir/CloudWatchLambdaFunction_V2.py"; then
                cp "$AWS_S3_DIR/CloudWatchLambdaFunction_V2.py" "$nested_dir/CloudWatchLambdaFunction_V2.py"
                echo " Updated: CloudWatchLambdaFunction_V2.py"
            fi
        fi
    else
        if [ -f "$AWS_S3_DIR/CloudWatchLambdaFunction.py" ]; then
            if [ ! -f "$nested_dir/CloudWatchLambdaFunction.py" ] || ! cmp -s "$AWS_S3_DIR/CloudWatchLambdaFunction.py" "$nested_dir/CloudWatchLambdaFunction.py"; then
                cp "$AWS_S3_DIR/CloudWatchLambdaFunction.py" "$nested_dir/CloudWatchLambdaFunction.py"
                echo " Updated: CloudWatchLambdaFunction.py"
            fi
        fi
        # Remove V2 if it exists (shouldn't be in gov bundles)
        if [ -f "$nested_dir/CloudWatchLambdaFunction_V2.py" ]; then
            rm "$nested_dir/CloudWatchLambdaFunction_V2.py"
            echo " Removed: CloudWatchLambdaFunction_V2.py (not needed for gov)"
        fi
    fi

    # Create the zip file
    cd "$nested_dir"
    zip -q -r "$work_dir/$nested_zip_name" . -i "*"

    # Clean up nested directory
    rm -rf "$nested_dir"

    echo "✓ Created $nested_zip_name"
}

# Create ConfigAwsS3DataConnectorScripts.zip (Commercial Azure - includes V2)
echo ""
echo "Building ConfigAwsS3DataConnectorScripts.zip..."
create_nested_zip "ConfigAwsS3DataConnectorScripts.zip" "ConfigAwsComToAzureCom.zip" "$TEMP_DIR/com" "v2"
create_nested_zip "ConfigAwsS3DataConnectorScripts.zip" "ConfigAwsGovToAzureCom.zip" "$TEMP_DIR/com" "v2"

cd "$TEMP_DIR/com"
zip -q "ConfigAwsS3DataConnectorScripts.zip" ConfigAwsComToAzureCom.zip ConfigAwsGovToAzureCom.zip
cp "ConfigAwsS3DataConnectorScripts.zip" "$AWS_S3_DIR/"
echo "✓ Created ConfigAwsS3DataConnectorScripts.zip"

# Create ConfigAwsS3DataConnectorScriptsGov.zip (Government Azure - no V2)
echo ""
echo "Building ConfigAwsS3DataConnectorScriptsGov.zip..."
create_nested_zip "ConfigAwsS3DataConnectorScriptsGov.zip" "ConfigAwsComToAzureGov.zip" "$TEMP_DIR/gov" "v1"
create_nested_zip "ConfigAwsS3DataConnectorScriptsGov.zip" "ConfigAwsGovToAzureGov.zip" "$TEMP_DIR/gov" "v1"

cd "$TEMP_DIR/gov"
zip -q "ConfigAwsS3DataConnectorScriptsGov.zip" ConfigAwsComToAzureGov.zip ConfigAwsGovToAzureGov.zip
cp "ConfigAwsS3DataConnectorScriptsGov.zip" "$AWS_S3_DIR/"
echo "✓ Created ConfigAwsS3DataConnectorScriptsGov.zip"

echo ""
echo "✅ Successfully created all AWS-S3 DataConnector script bundles!"
echo " - ConfigAwsS3DataConnectorScripts.zip"
echo " - ConfigAwsS3DataConnectorScriptsGov.zip"

Solutions/Snowflake/Data Connectors/SnowflakeLogs_ccp/SnowflakeLogs_PollingConfig.json

Lines changed: 10 additions & 0 deletions
@@ -16,6 +16,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,
@@ -69,6 +70,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,
@@ -122,6 +124,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,
@@ -175,6 +178,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,
@@ -228,6 +232,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,
@@ -281,6 +286,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,
@@ -334,6 +340,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,
@@ -387,6 +394,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,
@@ -440,6 +448,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,
@@ -493,6 +502,7 @@
 "apiEndpoint": "[[concat('https://',parameters('accountId'),'.snowflakecomputing.com','/api/v2/statements?retry=true')]",
 "httpMethod": "POST",
 "queryWindowInMin": 10,
+"queryWindowDelayInMin": 120,
 "queryTimeFormat": "yyyy-MM-ddTHH:mm:ssZ",
 "retryCount": 5,
 "timeoutInSeconds": 180,

Solutions/Snowflake/Data/Solution_Snowflake.json

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@
 ],
 "Metadata": "SolutionMetadata.json",
 "BasePath": "C:\\GitHub\\Azure-Sentinel\\Solutions\\Snowflake",
-"Version": "3.0.7",
+"Version": "3.0.8",
 "TemplateSpec": true,
 "Is1PConnector": false
 }
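
The only change here is the solution version bump from 3.0.7 to 3.0.8, which a reviewer could confirm locally (a minimal sketch, assuming jq is installed):

# Print the packaged solution version; 3.0.8 is expected after this commit
jq -r '.Version' Solutions/Snowflake/Data/Solution_Snowflake.json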
Binary file changed (38.1 KB); contents not shown.
