#!/bin/sh

# This script is designed to test functionality of data uploading & extraction.
# Creates a dataset, uploads a file, submits it for extraction to
# ncsa.file.digest, verifies the result, then deletes the dataset.

# Clowder URL and key to use, as well as the file, can be defined below
# (or supplied via the environment).
# Needs to have 'jq' installed.

# Default to empty so the non-empty checks below behave correctly
# (a default of " " would always look "set").
CLOWDER_URL=${CLOWDER_URL:-""}
CLOWDER_KEY=${CLOWDER_KEY:-""}
TARGET_FILE=${TARGET_FILE:-""}

# Slack token for notifications (notifications are skipped when unset).
SLACK_TOKEN=${SLACK_TOKEN:-""}
SLACK_CHANNEL=${SLACK_CHANNEL:-"#github"}
SLACK_USER=${SLACK_USER:-"NCSA Build"}
18+
# post_message MESSAGE
# Prints MESSAGE to stdout and, when a Slack webhook token and channel are
# configured, also posts the same text to Slack via an incoming webhook.
post_message () {
    printf '%s\n' "$1"
    # Only notify Slack when both the token and the channel are configured.
    if [ -n "${SLACK_TOKEN}" ] && [ -n "${SLACK_CHANNEL}" ]; then
        url="https://hooks.slack.com/services/${SLACK_TOKEN}"
        # Escape double quotes and encode newlines as literal \n so the
        # message text is valid inside the JSON payload.
        txt=$(printf '%s' "$1" | sed 's/"/\\"/g' | awk 'NR>1{printf "\\n"} {printf "%s", $0}')
        payload="payload={\"channel\": \"${SLACK_CHANNEL}\", \"username\": \"${SLACK_USER}\", \"text\": \"${txt}\"}"
        result=$(curl -s -X POST --data-urlencode "${payload}" "$url")
    fi
}
28+
# ------------------------ Create dataset ------------------------
# Create an empty dataset to hold the test file and capture its id from
# the JSON response.  The URL is quoted so '?' is not treated as a glob
# character by the shell.
DATASET_ID=$(curl -s -X POST -H "Content-Type: application/json" \
    -d '{"name":"Temporary Test Dataset", "description":"Created automatically by test script."}' \
    "$CLOWDER_URL/api/datasets/createempty?key=$CLOWDER_KEY")
# 'jq -r' emits the raw string, so no sed post-processing of quotes is needed.
DATASET_ID=$(echo "$DATASET_ID" | jq -r '.id')
echo "Dataset ID: $DATASET_ID"
35+
# ------------------------ Upload file ------------------------
# Upload the target file into the dataset.  extract=0 suppresses automatic
# extraction so the extractor can be submitted explicitly below.  The URL
# must be quoted: an unquoted '&' would background the curl command.
FILE_ID=$(curl -s -X POST -F "File=@$TARGET_FILE" \
    "$CLOWDER_URL/api/uploadToDataset/$DATASET_ID?key=$CLOWDER_KEY&extract=0")
FILE_ID=$(echo "$FILE_ID" | jq -r '.id')
echo "File ID: $FILE_ID"
41+
# Validate upload: poll the file metadata until its status is PROCESSED,
# giving up after 12 attempts (~2 minutes).  On timeout the dataset is
# deleted and a failure notification is posted.
FILE_UPLOADED=0
RETRIES=0
while [ "$FILE_UPLOADED" -eq 0 ]; do
    RESPONSE=$(curl -s -X GET -H "Content-Type: application/json" \
        "$CLOWDER_URL/api/files/$FILE_ID/metadata?key=$CLOWDER_KEY")
    RESPONSE=$(echo "$RESPONSE" | jq -r '.status')
    if [ "$RESPONSE" = "PROCESSED" ]; then
        FILE_UPLOADED=1
        # Stop polling immediately on success instead of sleeping again.
        break
    fi
    RETRIES=$((RETRIES + 1))
    if [ "$RETRIES" -ge 12 ]; then
        echo "File upload not PROCESSED after 2 minutes. There may be a problem. Deleting dataset."
        curl -s -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"
        post_message "Upload+extract test script failing on $CLOWDER_URL/files/$FILE_ID (status is not PROCESSED)"
        exit 1
    fi
    echo "File upload not complete; checking again in 10 seconds."
    sleep 10
done
echo "File upload complete."
63+
# ------------------------ Submit for extraction ------------------------
# Ask Clowder to run the ncsa.file.digest extractor on the uploaded file.
curl -s -X POST -H "Content-Type: application/json" \
    -d '{"extractor": "ncsa.file.digest"}' \
    "$CLOWDER_URL/api/files/$FILE_ID/extractions?key=$CLOWDER_KEY"
68+
# Validate extraction: poll the extraction status until the
# ncsa.file.digest entry reports DONE, giving up after 24 attempts
# (~4 minutes).  On timeout the dataset is deleted and a failure
# notification is posted.
FILE_EXTRACTED=0
RETRIES=0
while [ "$FILE_EXTRACTED" -eq 0 ]; do
    RESPONSE=$(curl -s -X GET -H "Content-Type: application/json" \
        "$CLOWDER_URL/api/extractions/$FILE_ID/status?key=$CLOWDER_KEY")
    echo "$RESPONSE"
    RESPONSE=$(echo "$RESPONSE" | jq -r '."ncsa.file.digest"')
    if [ "$RESPONSE" = "DONE" ]; then
        FILE_EXTRACTED=1
        post_message "Extractor: [ncsa.file.digest] success $CLOWDER_URL/files/$FILE_ID"
        # Stop polling immediately on success instead of sleeping again.
        break
    fi
    RETRIES=$((RETRIES + 1))
    if [ "$RETRIES" -ge 24 ]; then
        echo "File extraction not DONE after 4 minutes. There may be a problem. Deleting dataset."
        curl -s -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"
        post_message "Upload+extract test script failing on $CLOWDER_URL/files/$FILE_ID (extractor not DONE)"
        exit 1
    fi
    echo "File extraction not complete; checking again in 10 seconds."
    sleep 10
done
echo "File extraction complete."
92+
# ------------------------ Delete dataset ------------------------
# Clean up: remove the temporary dataset (this also removes the uploaded file).
curl -s -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"

echo "Test complete."