#!/bin/sh

# This script tests data upload & extraction functionality end to end.
# It creates a dataset, uploads a file, submits it for extraction by
# ncsa.file.digest, verifies the result, and deletes the dataset.

# The Clowder URL, API key, and file to upload are defined below.
# Requires 'jq' to be installed.
CLOWDER_URL=
CLOWDER_KEY=
TARGET_FILE=

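# Fail fast if the configuration above is incomplete or 'jq' is missing;
# everything below assumes both.
if [ -z "$CLOWDER_URL" ] || [ -z "$CLOWDER_KEY" ] || [ -z "$TARGET_FILE" ]; then
    echo "CLOWDER_URL, CLOWDER_KEY, and TARGET_FILE must all be set." >&2
    exit 1
fi
if ! command -v jq >/dev/null 2>&1; then
    echo "This script requires 'jq' to be installed." >&2
    exit 1
fi
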
# Slack settings for notifications; Slack posting is skipped when
# SLACK_TOKEN is empty.
SLACK_TOKEN=${SLACK_TOKEN:-""}
SLACK_CHANNEL=${SLACK_CHANNEL:-"#github"}
SLACK_USER=${SLACK_USER:-"NCSA Build"}

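# post_message MESSAGE
# Prints MESSAGE locally and, when a Slack incoming-webhook token is
# configured, also posts it to the configured channel.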
post_message() {
    printf '%s\n' "$1"
    if [ -n "${SLACK_TOKEN}" ] && [ -n "${SLACK_CHANNEL}" ]; then
        url="https://hooks.slack.com/services/${SLACK_TOKEN}"
        # Escape double quotes and turn newlines into literal \n so the
        # message can be embedded in the JSON payload.
        txt=$( printf '%s\n' "$1" | sed 's/"/\\"/g' | awk '{printf "%s\\n", $0}' )
        payload="payload={\"channel\": \"${SLACK_CHANNEL}\", \"username\": \"${SLACK_USER}\", \"text\": \"${txt}\"}"
        result=$(curl -s -X POST --data-urlencode "${payload}" "$url")
    fi
}

# ------------------------ Create dataset ------------------------
DATASET_ID=$(curl -X POST -H "Content-Type: application/json" \
    -d '{"name":"Temporary Test Dataset", "description":"Created automatically by test script."}' \
    "$CLOWDER_URL/api/datasets/createempty?key=$CLOWDER_KEY")
DATASET_ID=$(echo "$DATASET_ID" | jq -r '.id')
echo "Dataset ID: $DATASET_ID"
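
# jq prints "null" when .id is missing, so treat that as a failed create.
if [ -z "$DATASET_ID" ] || [ "$DATASET_ID" = "null" ]; then
    post_message "Upload+extract test script could not create a dataset on $CLOWDER_URL"
    exit 1
fi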

# ------------------------ Upload file ------------------------
# -F sends multipart/form-data, so no JSON Content-Type header here; the
# URL is quoted so '&' stays part of the query string.
FILE_ID=$(curl -X POST \
    -F "File=@$TARGET_FILE" \
    "$CLOWDER_URL/api/uploadToDataset/$DATASET_ID?key=$CLOWDER_KEY&extract=0")
FILE_ID=$(echo "$FILE_ID" | jq -r '.id')
echo "File ID: $FILE_ID"
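
# Same null check for the uploaded file's id; clean up on failure.
if [ -z "$FILE_ID" ] || [ "$FILE_ID" = "null" ]; then
    curl -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"
    post_message "Upload+extract test script failed to upload $TARGET_FILE to $CLOWDER_URL"
    exit 1
fi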

# Validate upload: poll until the file's status is PROCESSED, giving up
# after 12 tries (about 2 minutes).
FILE_UPLOADED=0
RETRIES=0
while [ "$FILE_UPLOADED" = 0 ]; do
    RESPONSE=$(curl -X GET -H "Content-Type: application/json" \
        "$CLOWDER_URL/api/files/$FILE_ID/metadata?key=$CLOWDER_KEY")
    RESPONSE=$(echo "$RESPONSE" | jq -r '.status')
    if [ "$RESPONSE" = "PROCESSED" ]; then
        FILE_UPLOADED=1
        break
    fi
    RETRIES=$((RETRIES+1))
    if [ $RETRIES -ge 12 ]; then
        echo "File upload not PROCESSED after 2 minutes. There may be a problem. Deleting dataset."
        curl -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"
        post_message "Upload+extract test script failing on $CLOWDER_URL/files/$FILE_ID (status is not PROCESSED)"
        exit 1
    fi
    echo "File upload not complete; checking again in 10 seconds."
    sleep 10
done
echo "File upload complete."

# ------------------------ Submit for extraction ------------------------
curl -X POST -H "Content-Type: application/json" \
    -d '{"extractor": "ncsa.file.digest"}' \
    "$CLOWDER_URL/api/files/$FILE_ID/extractions?key=$CLOWDER_KEY"

# Validate extraction: the status endpoint keys results by extractor name,
# so the dotted key must be quoted for jq. Give up after 24 tries (~4 min).
FILE_EXTRACTED=0
RETRIES=0
while [ "$FILE_EXTRACTED" = 0 ]; do
    RESPONSE=$(curl -X GET -H "Content-Type: application/json" \
        "$CLOWDER_URL/api/extractions/$FILE_ID/status?key=$CLOWDER_KEY")
    RESPONSE=$(echo "$RESPONSE" | jq -r '.["ncsa.file.digest"]')
    if [ "$RESPONSE" = "DONE" ]; then
        FILE_EXTRACTED=1
        break
    fi
    RETRIES=$((RETRIES+1))
    if [ $RETRIES -ge 24 ]; then
        echo "File extraction not DONE after 4 minutes. There may be a problem. Deleting dataset."
        curl -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"
        post_message "Upload+extract test script failing on $CLOWDER_URL/files/$FILE_ID (extractor not DONE)"
        exit 1
    fi
    echo "File extraction not complete; checking again in 10 seconds."
    sleep 10
done
echo "File extraction complete."
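
# Optionally log the file's metadata so the digest written by
# ncsa.file.digest can be inspected in the test output. This reuses the
# metadata endpoint from the upload check above; the exact location of
# the digest may differ per Clowder instance.
curl -X GET "$CLOWDER_URL/api/files/$FILE_ID/metadata?key=$CLOWDER_KEY"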

# ------------------------ Delete dataset ------------------------
curl -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"

echo "Test complete."