|
| 1 | +pipeline: |
| 2 | + projectIdentifier: migration |
| 3 | + orgIdentifier: default |
| 4 | + tags: {} |
| 5 | + stages: |
| 6 | + - stage: |
| 7 | + name: process data |
| 8 | + identifier: process_data |
| 9 | + description: "" |
| 10 | + type: CI |
| 11 | + spec: |
| 12 | + cloneCodebase: false |
| 13 | + execution: |
| 14 | + steps: |
| 15 | + - parallel: |
| 16 | + - stepGroup: |
| 17 | + identifier: Download_Traces |
| 18 | + name: Download Traces |
| 19 | + steps: |
| 20 | + - step: |
| 21 | + identifier: Download_Traces |
| 22 | + type: Run |
| 23 | + name: Download Traces |
| 24 | + spec: |
| 25 | + connectorRef: account.harnessImage |
| 26 | + image: amazon/aws-cli |
| 27 | + shell: Sh |
| 28 | + command: |- |
| 29 | + mkdir -p traces |
| 30 | +
|
| 31 | + aws s3 cp s3://$TRACE_BUCKET/ traces/archived/ --recursive |
| 32 | + envVariables: |
| 33 | + AWS_ACCESS_KEY_ID: <+secrets.getValue("ciplayawsaccesskeyid")> # FIXME(review): access key ID was hardcoded in plaintext here — rotate the exposed AKIA2V2EZZUNIHFZNMDQ key and store the replacement under this secret identifier (confirm name) |
| 34 | + AWS_SECRET_ACCESS_KEY: <+secrets.getValue("ciplayawss3secret")> |
| 35 | + outputVariables: |
| 36 | + - name: version |
| 37 | + type: String |
| 38 | + value: version |
| 39 | + - step: |
| 40 | + identifier: untar |
| 41 | + type: Run |
| 42 | + name: untar |
| 43 | + spec: |
| 44 | + connectorRef: account.harnessImage |
| 45 | + image: ubuntu |
| 46 | + shell: Bash |
| 47 | + command: |- |
| 48 | + # Function to unzip all zip files in a folder |
| 49 | + folder_path="/harness/traces/archived" |
| 50 | + extract_folder="/harness/traces/extracted" |
| 51 | + mkdir -p "$extract_folder" |
| 52 | +
|
| 53 | + # Loop through all zip files in the folder |
| 54 | + for zip_file in "$folder_path"/*.zip; do |
| 55 | + # Check if there are any zip files |
| 56 | + if [[ -e "$zip_file" ]]; then |
| 57 | + # Create a directory for extraction (same name as the zip file, without the extension) |
| 58 | + |
| 59 | + # Unzip the file |
| 60 | + echo "Extracting $zip_file to $extract_folder" |
| 61 | + unzip -n -q "$zip_file" -d "$extract_folder" || true |
| 62 | + rm -rf "$zip_file" |
| 63 | + fi |
| 64 | + done |
| 65 | + - stepGroup: |
| 66 | + name: get binaries |
| 67 | + identifier: get_binaries |
| 68 | + steps: |
| 69 | + - step: |
| 70 | + type: Run |
| 71 | + name: download binaries |
| 72 | + identifier: doneload_binary |
| 73 | + spec: |
| 74 | + connectorRef: account.harnessImage |
| 75 | + image: ubuntu |
| 76 | + shell: Sh |
| 77 | + command: |- |
| 78 | + wget -O bin.zip https://elasticbeanstalk-us-east-1-734046833946.s3.us-east-1.amazonaws.com/bin.zip |
| 79 | + unzip -q -o bin.zip -d . |
| 80 | + chmod -R +x bin |
| 81 | + - step: |
| 82 | + type: Run |
| 83 | + name: Run Migration |
| 84 | + identifier: Run_Migration |
| 85 | + spec: |
| 86 | + connectorRef: account.harnessImage |
| 87 | + image: openjdk:11 |
| 88 | + shell: Bash |
| 89 | + command: |- |
| 90 | + export HOME="/root" |
| 91 | + cd bin |
| 92 | +
|
| 93 | + archive_dir=/harness/traces/extracted |
| 94 | +
|
| 95 | + script_dir=$(dirname "$0") |
| 96 | + base_out_dir="$archive_dir/files" |
| 97 | + merged_dir="$base_out_dir/4-merged" |
| 98 | + convert_dir="$base_out_dir/5-converted" |
| 99 | +
|
| 100 | + mkdir -p "$merged_dir" |
| 101 | + mkdir -p "$convert_dir" |
| 102 | +
|
| 103 | + export _JAVA_OPTIONS="-Xms2g -Xmx12g" |
| 104 | + cli_jar="cli-wrapper-3.2.6-SNAPSHOT.jar" |
| 105 | +
|
| 106 | + echo "Starting merge" |
| 107 | + java -jar "$cli_jar" -t "$archive_dir" -o merged.zip |
| 108 | + unzip -q -o merged.zip -d "$merged_dir" |
| 109 | + rm merged.zip |
| 110 | +
|
| 111 | + # step 4: convert |
| 112 | + echo "Starting convert" |
| 113 | + mkdir -p "$convert_dir" |
| 114 | + ./go-convert jenkinsjson -downgrade -project "$PROJECT" -output-dir "$convert_dir" "$merged_dir" |
| 115 | + echo |
| 116 | + echo "Conversion Done" |
| 117 | + echo "Converted YAML files are stored here: $convert_dir" |
| 118 | + echo |
| 119 | +
|
| 120 | + # publish result to a tar |
| 121 | + tar -czvf result.tar.gz "$base_out_dir" |
| 122 | + contextType: Pipeline |
| 123 | + - step: |
| 124 | + type: Run |
| 125 | + name: Upload Results |
| 126 | + identifier: Upload_Results |
| 127 | + spec: |
| 128 | + connectorRef: account.harnessImage |
| 129 | + image: amazon/aws-cli |
| 130 | + shell: Sh |
| 131 | + command: aws s3 cp bin/result.tar.gz s3://$RESULT_BUCKET/processed/ |
| 132 | + envVariables: |
| 133 | + AWS_ACCESS_KEY_ID: <+secrets.getValue("ciplayawsaccesskeyid")> # FIXME(review): access key ID was hardcoded in plaintext here — rotate the exposed AKIA2V2EZZUNIHFZNMDQ key and store the replacement under this secret identifier (confirm name) |
| 134 | + AWS_SECRET_ACCESS_KEY: <+secrets.getValue("ciplayawss3secret")> |
| 135 | + when: |
| 136 | + stageStatus: Success |
| 137 | + condition: <+stage.variables.UPLOAD_RESULT> |
| 138 | + - step: |
| 139 | + type: Run |
| 140 | + name: Post Pipeline |
| 141 | + identifier: Post_Pipeline |
| 142 | + spec: |
| 143 | + connectorRef: account.harnessImage |
| 144 | + image: python:3.8.20 |
| 145 | + shell: Python |
| 146 | + command: |- |
| 147 | + import requests |
| 148 | + import os |
| 149 | +
|
| 150 | + # URL of the API endpoint |
| 151 | + url = f"https://app.harness.io/pipeline/api/pipelines/v2?accountIdentifier={os.environ['ACCOUNT']}&orgIdentifier={os.environ['ORG']}&projectIdentifier={os.environ['PROJECT']}&storeType=INLINE&" |
| 152 | +
|
| 153 | + # API Key for authentication |
| 154 | + api_key = os.environ['API_KEY'] |
| 155 | +
|
| 156 | + headers = { |
| 157 | + 'Content-Type': 'application/yaml', |
| 158 | + 'x-api-key': api_key |
| 159 | + } |
| 160 | +
|
| 161 | + folder_path = "/harness/traces/extracted/files/5-converted" |
| 162 | +
|
| 163 | + for filename in os.listdir(folder_path): |
| 164 | + file_path = os.path.join(folder_path, filename) |
| 165 | + |
| 166 | + # Only process files that end with .yaml or .yml |
| 167 | + if file_path.endswith(('.yaml', '.yml')): |
| 168 | + try: |
| 169 | + with open(file_path, 'r') as file: |
| 170 | + yaml_content = file.read() |
| 171 | + |
| 172 | + # Send the POST request with the YAML content as the body |
| 173 | + response = requests.post(url, headers=headers, data=yaml_content) |
| 174 | + |
| 175 | + # Print the response status and content |
| 176 | + print(f"Sent {filename} - Status Code: {response.status_code}") |
| 177 | + if response.status_code != 200: |
| 178 | + print(f"Error: {response.text}") |
| 179 | + else: |
| 180 | + print(f"Response for {filename}: {response.text[:100]}...") # Print part of the response |
| 181 | + |
| 182 | + except Exception as e: |
| 183 | + print(f"Failed to process {filename}: {e}") |
| 184 | + else: |
| 185 | + print(f"Skipping non-YAML file: {filename}") |
| 186 | + when: |
| 187 | + stageStatus: Success |
| 188 | + condition: <+stage.variables.POST_PIPELINE> |
| 189 | + caching: |
| 190 | + enabled: false |
| 191 | + paths: [] |
| 192 | + buildIntelligence: |
| 193 | + enabled: false |
| 194 | + infrastructure: |
| 195 | + type: KubernetesDirect |
| 196 | + spec: |
| 197 | + connectorRef: aws_k8_new |
| 198 | + namespace: aws_k8_new |
| 199 | + automountServiceAccountToken: true |
| 200 | + nodeSelector: {} |
| 201 | + os: Linux |
| 202 | + variables: |
| 203 | + - name: TRACE_BUCKET |
| 204 | + type: String |
| 205 | + description: "" |
| 206 | + required: false |
| 207 | + value: <+input> |
| 208 | + - name: RESULT_BUCKET |
| 209 | + type: String |
| 210 | + description: "" |
| 211 | + required: false |
| 212 | + value: <+input> |
| 213 | + - name: UPLOAD_RESULT |
| 214 | + type: String |
| 215 | + description: "" |
| 216 | + required: false |
| 217 | + value: <+input>.selectOneFrom("true","false") |
| 218 | + - name: POST_PIPELINE |
| 219 | + type: String |
| 220 | + description: "" |
| 221 | + required: false |
| 222 | + value: <+input>.selectOneFrom("true","false") |
| 223 | + - name: PROJECT |
| 224 | + type: String |
| 225 | + description: "" |
| 226 | + required: true |
| 227 | + value: <+input> |
| 228 | + - name: ORG |
| 229 | + type: String |
| 230 | + description: "" |
| 231 | + required: true |
| 232 | + value: <+input> |
| 233 | + - name: ACCOUNT |
| 234 | + type: String |
| 235 | + description: "" |
| 236 | + required: false |
| 237 | + value: <+input> |
| 238 | + - name: API_KEY |
| 239 | + type: Secret |
| 240 | + description: "" |
| 241 | + required: false |
| 242 | + value: jamie_api_key_harness |
| 243 | + identifier: Customer_Jenkins_Data_Processing |
| 244 | + name: Customer Jenkins Data Processing |
0 commit comments