-
Notifications
You must be signed in to change notification settings - Fork 3
140 lines (118 loc) · 4.68 KB
/
main_pr.yml
File metadata and controls
140 lines (118 loc) · 4.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
name: Pull Request trigger

# NOTE: a bare `on` key is read as boolean true by generic YAML 1.1 parsers;
# GitHub's own workflow loader handles it correctly (suppress yamllint `truthy`).
on:
  pull_request:

# NOTE(review): these write scopes apply to every job in the workflow —
# confirm each one is actually required by the called reusable workflows.
permissions:
  id-token: write
  contents: write
  pull-requests: write
  issues: write
jobs:
  # Inspects the PR's changed files to decide which component suites must run,
  # and computes shared values consumed by the downstream jobs:
  # the latest OpenLineage release and the Spark/Dataproc test matrix.
  initialize_workflow:
    runs-on: ubuntu-latest
    outputs:
      run_dataplex: ${{ steps.get-changed.outputs.dataplex_changed }}
      run_scenarios: ${{ steps.get-changed.outputs.scenarios_changed }}
      run_spark_dataproc: ${{ steps.get-changed.outputs.spark_dataproc_changed }}
      ol_release: ${{ steps.get-release.outputs.openlineage_release }}
      any_run: ${{ steps.get-changed.outputs.any_changed }}
      test_matrix: ${{ steps.set-matrix-values.outputs.spark_dataproc_matrix }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: check file structure
        id: check-structure
        run: ./scripts/check_structure.sh
      - name: check configs
        id: check-configs
        run: ./scripts/check_configs.sh
      # Lists the PR's changed files via the gh CLI and sets one
      # "<component>_changed=true" output per matched path prefix, plus
      # any_changed=true when at least one component matched.
      - name: get changed files
        id: get-changed
        run: |
          check_path() {
            local path=$1
            local output=$2
            if echo "$CHANGED_FILES" | grep -q "$path"; then
              echo "$output=true" >> $GITHUB_OUTPUT
              echo "true"
            fi
          }
          CHANGED_FILES=$(gh pr diff ${{ github.event.pull_request.number }} --name-only)
          if [[ -n "$CHANGED_FILES" ]]; then
            echo "changes=$(echo "$CHANGED_FILES" | jq -R -s -c 'split("\n")[:-1]')" >> $GITHUB_OUTPUT
            scenarios=$(check_path "consumer/scenarios/" "scenarios_changed")
            dataplex=$(check_path "consumer/consumers/dataplex/" "dataplex_changed")
            spark_dataproc=$(check_path "producer/spark_dataproc/" "spark_dataproc_changed")
            if [[ $scenarios || $dataplex || $spark_dataproc ]]; then
              echo "any_changed=true" >> $GITHUB_OUTPUT
            fi
          fi
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      # Reads the latest OpenLineage version out of the checked-in
      # generated-files/releases.json.
      - name: get openlineage release
        id: get-release
        run: |
          echo " any changed value is ${{ steps.get-changed.outputs.any_changed }}"
          openlineage_release=$(cat generated-files/releases.json | jq -c '.[] | select(.name | contains("openlineage")) | .latest_version ' -r)
          echo "openlineage_release=${openlineage_release}" >> $GITHUB_OUTPUT
      # Emits the producer's versions.json (compacted) as the strategy matrix
      # for the spark_dataproc job; fails the step if the file is missing.
      - name: set-matrix-values
        id: set-matrix-values
        run: |
          check_producer() {
            local producer="$1"
            local file="./producer/${producer}/versions.json"
            if [[ -f "$file" ]]; then
              cat "$file" | jq -c
            else
              echo "Error: File '$file' does not exist." >&2
              return 1
            fi
          }
          echo "spark_dataproc_matrix=$(check_producer spark_dataproc)" >> $GITHUB_OUTPUT
          # echo "myoutput=$(jq -cn --argjson environments "$TARGETS" '{target: $environments}')" >> $GITHUB_OUTPUT
######## COMPONENT VALIDATION ########

  # Runs scenario checks only when files under consumer/scenarios/ changed.
  scenarios:
    needs: initialize_workflow
    if: ${{ needs.initialize_workflow.outputs.run_scenarios == 'true' }}
    uses: ./.github/workflows/check_scenarios.yml
    with:
      get-latest-snapshots: false
      release: ${{ needs.initialize_workflow.outputs.ol_release }}

  # Dataplex consumer validation. `!failure()` lets this run even when the
  # `scenarios` job was skipped, while still halting on a real failure.
  dataplex:
    needs:
      - initialize_workflow
      - scenarios
    if: ${{ !failure() && needs.initialize_workflow.outputs.run_dataplex == 'true' }}
    uses: ./.github/workflows/consumer_dataplex.yml
    secrets:
      gcpKey: ${{ secrets.GCP_SA_KEY }}
    with:
      release: ${{ needs.initialize_workflow.outputs.ol_release }}

  # Spark-on-Dataproc producer validation, fanned out over the version matrix
  # computed by initialize_workflow (one run per OpenLineage/Spark pairing).
  spark_dataproc:
    needs: initialize_workflow
    if: ${{ needs.initialize_workflow.outputs.run_spark_dataproc == 'true' }}
    uses: ./.github/workflows/producer_spark_dataproc.yml
    strategy:
      matrix: ${{ fromJson(needs.initialize_workflow.outputs.test_matrix) }}
    secrets:
      gcpKey: ${{ secrets.GCP_SA_KEY }}
      postgresqlUser: ${{ secrets.POSTGRESQL_USER }}
      postgresqlPassword: ${{ secrets.POSTGRESQL_PASSWORD }}
    with:
      ol_release: ${{ matrix.openlineage_versions }}
      spark_release: ${{ matrix.component_version }}
      # NOTE(review): passed as the string 'false' here but as boolean false in
      # the scenarios job above — confirm the input type each called workflow
      # declares before normalizing.
      get-latest-snapshots: 'false'
######## COLLECTION OF REPORTS AND EXECUTE APPROPRIATE ACTIONS ########

  # Aggregates reports from all component jobs. `!failure()` lets it run after
  # skipped dependencies; any_run gates it to PRs that triggered at least one
  # component suite.
  collect-and-compare-reports:
    needs:
      - initialize_workflow
      - scenarios
      - dataplex
      - spark_dataproc
    if: ${{ !failure() && needs.initialize_workflow.outputs.any_run == 'true'}}
    uses: ./.github/workflows/collect_and_compare_reports.yml
    with:
      fail-for-new-failures: true

  # Regenerates the compatibility tables once reports have been collected;
  # no `if`, so it is skipped whenever the collection job did not run.
  generate-compatibility-tables:
    needs:
      - collect-and-compare-reports
    uses: ./.github/workflows/generate_compatibility_tables.yml