Commit 5968d66

Pull request #1434: Create tester.sh script

Merge in CATS/clowder from add-tester-script to develop

* commit '1f9e6c038b740071444c8873111bb3977bcf05a6':
  update
  updates
  update Dockerfile
  updates to use Ubuntu
  dockerfile updates
  remove old geo metadata cotaining invalidate geo url
  script to submit geoserver extractions
  add slack message notification
  Create tester.sh

2 parents aedcdf2 + 1f9e6c0 · commit 5968d66

File tree: 3 files changed (+115, -0 lines)

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 of these changes a reindex of Elasticsearch is required. This can be started by an admin either from GUI or through the API.**
 
 ### Added
+- Added script to test Clowder extraction.
 - Added ability to delete extractor, both from api and GUI.
   [CATS-1044](https://opensource.ncsa.illinois.edu/jira/browse/CATS-1044)
 - API add tags endpoint now returns the added tags.

scripts/tester/Dockerfile

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
+FROM ubuntu:16.04
+
+
+# environment variables
+ENV CLOWDER_URL=${CLOWDER_URL} \
+    CLOWDER_KEY=${CLOWDER_KEY} \
+    TARGET_FILE=${TARGET_FILE} \
+    SLACK_TOKEN=${SLACK_TOKEN} \
+    SLACK_CHANNEL=${SLACK_CHANNEL} \
+    SLACK_USER=${SLACK_USER}
+
+
+RUN apt-get update && apt-get install -y curl jq && apt-get clean && rm -rf /var/lib/apt/lists/*
+
+COPY tester.sh /
+
+CMD ["sh", "/tester.sh"]
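
Since the image bakes in no data file, the target file has to be mounted in at run time. A minimal usage sketch (the image tag, Clowder host, API key, and sample file below are illustrative placeholders, not part of the commit):

    docker build -t clowder-tester scripts/tester/
    docker run --rm \
        -e CLOWDER_URL=https://clowder.example.com \
        -e CLOWDER_KEY=your-api-key \
        -e TARGET_FILE=/data/sample.txt \
        -v "$(pwd)/sample.txt:/data/sample.txt" \
        clowder-tester

The Slack variables can be omitted; the script only posts notifications when SLACK_TOKEN is set.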

scripts/tester/tester.sh

Lines changed: 97 additions & 0 deletions
@@ -0,0 +1,97 @@
+#!/bin/sh
+
+# This script tests data upload & extraction functionality.
+# It creates a dataset, uploads a file, submits it for extraction to
+# ncsa.file.digest, verifies the result, and deletes the dataset.
+
+# The Clowder URL, API key, and target file can be defined below or via
+# environment variables. Requires 'curl' and 'jq' to be installed.
+
+CLOWDER_URL=${CLOWDER_URL:-""}
+CLOWDER_KEY=${CLOWDER_KEY:-""}
+TARGET_FILE=${TARGET_FILE:-""}
+
+# Slack token for notifications
+SLACK_TOKEN=${SLACK_TOKEN:-""}
+SLACK_CHANNEL=${SLACK_CHANNEL:-"#github"}
+SLACK_USER=${SLACK_USER:-"NCSA Build"}
+
+# Print a message and, if a Slack token is configured, post it to Slack
+# through an incoming webhook.
+post_message() {
+    printf '%s\n' "$1"
+    if [ "${SLACK_TOKEN}" != "" ] && [ "${SLACK_CHANNEL}" != "" ]; then
+        url="https://hooks.slack.com/services/${SLACK_TOKEN}"
+        # escape double quotes and encode newlines for the JSON payload
+        txt=$(printf '%s' "$1" | sed 's/"/\\"/g' | awk '{printf "%s\\n", $0}')
+        payload="payload={\"channel\": \"${SLACK_CHANNEL}\", \"username\": \"${SLACK_USER}\", \"text\": \"${txt}\"}"
+        result=$(curl -s -X POST --data-urlencode "${payload}" "$url")
+    fi
+}
+
+# ------------------------ Create dataset ------------------------
+DATASET_ID=$(curl -s -X POST -H "Content-Type: application/json" \
+    -d '{"name":"Temporary Test Dataset", "description":"Created automatically by test script."}' \
+    "$CLOWDER_URL/api/datasets/createempty?key=$CLOWDER_KEY")
+DATASET_ID=$(echo "$DATASET_ID" | jq -r '.id')
+echo "Dataset ID: $DATASET_ID"
+
+# ------------------------ Upload file ------------------------
+# The URL must be quoted so the shell does not treat '&' as a background job.
+FILE_ID=$(curl -s -X POST -F "File=@$TARGET_FILE" \
+    "$CLOWDER_URL/api/uploadToDataset/$DATASET_ID?key=$CLOWDER_KEY&extract=0")
+FILE_ID=$(echo "$FILE_ID" | jq -r '.id')
+echo "File ID: $FILE_ID"
+
+# Validate upload
+FILE_UPLOADED=0
+RETRIES=0
+while [ $FILE_UPLOADED -eq 0 ]; do
+    RESPONSE=$(curl -s -X GET -H "Content-Type: application/json" \
+        "$CLOWDER_URL/api/files/$FILE_ID/metadata?key=$CLOWDER_KEY")
+    RESPONSE=$(echo "$RESPONSE" | jq -r '.status')
+    if [ "$RESPONSE" = "PROCESSED" ]; then
+        FILE_UPLOADED=1
+    fi
+    RETRIES=$((RETRIES+1))
+    if [ $RETRIES -eq 12 ]; then
+        echo "File upload not PROCESSED after 2 minutes. There may be a problem. Deleting dataset."
+        curl -s -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"
+        post_message "Upload+extract test script failing on $CLOWDER_URL/files/$FILE_ID (status is not PROCESSED)"
+        exit 1
+    fi
+    echo "File upload not complete; checking again in 10 seconds."
+    sleep 10
+done
+echo "File upload complete."
+
+# ------------------------ Submit for extraction ------------------------
+curl -s -X POST -H "Content-Type: application/json" \
+    -d '{"extractor": "ncsa.file.digest"}' \
+    "$CLOWDER_URL/api/files/$FILE_ID/extractions?key=$CLOWDER_KEY"
+
+# Validate extraction
+FILE_EXTRACTED=0
+RETRIES=0
+while [ $FILE_EXTRACTED -eq 0 ]; do
+    RESPONSE=$(curl -s -X GET -H "Content-Type: application/json" \
+        "$CLOWDER_URL/api/extractions/$FILE_ID/status?key=$CLOWDER_KEY")
+    echo "$RESPONSE"
+    RESPONSE=$(echo "$RESPONSE" | jq -r '."ncsa.file.digest"')
+    if [ "$RESPONSE" = "DONE" ]; then
+        FILE_EXTRACTED=1
+        post_message "Extractor: [ncsa.file.digest] success $CLOWDER_URL/files/$FILE_ID"
+    fi
+    RETRIES=$((RETRIES+1))
+    if [ $RETRIES -eq 24 ]; then
+        echo "File extraction not DONE after 4 minutes. There may be a problem. Deleting dataset."
+        curl -s -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"
+        post_message "Upload+extract test script failing on $CLOWDER_URL/files/$FILE_ID (extractor not DONE)"
+        exit 1
+    fi
+    echo "File extraction not complete; checking again in 10 seconds."
+    sleep 10
+done
+echo "File extraction complete."
+
+
+# ------------------------ Delete dataset ------------------------
+curl -s -X DELETE "$CLOWDER_URL/api/datasets/$DATASET_ID?key=$CLOWDER_KEY"
+
+echo "Test complete."
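
The script can also be run directly without Docker; a minimal invocation sketch (the host, key, and file below are placeholders):

    CLOWDER_URL=https://clowder.example.com \
    CLOWDER_KEY=your-api-key \
    TARGET_FILE=./sample.txt \
    sh scripts/tester/tester.sh

On success it exits 0 after deleting the temporary dataset; if the upload or extraction stalls past its retry budget, it deletes the dataset, posts a failure notice to Slack (when SLACK_TOKEN is set), and exits 1.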
