
Commit ca73477

Author: Ruben L. Mendoza

Merge pull request #230 from developmentseed/azure/storage

Supporting Azure/storage and updating env vars for replication files

2 parents: 0c8550b + b354005

21 files changed (+279 -121 lines)

compose/db-backup-restore.yml

Lines changed: 1 addition & 3 deletions
@@ -13,11 +13,9 @@ services:
     command: >
       /bin/bash -c "
       echo 'Set cron job for backing up DB, every 4 minutes';
-      while :; do sleep 4m; echo 'Creating DB backup...'; /start.sh; done;
+      while :; do echo 'Creating DB backup...'; /start.sh; sleep 4m; done;
       "
     env_file:
       - ../envs/.env.db
       - ../envs/.env.db-utils
       - ../envs/.env.cloudprovider
-    depends_on:
-      - db
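
The reordered loop runs the backup as soon as the container starts and only waits afterwards, instead of idling for the first 4 minutes. A rough sketch of the same run-then-sleep pattern with a configurable interval (SLEEP_INTERVAL is a hypothetical variable, not part of this change):

  # Hypothetical: run the job immediately, then wait between iterations
  SLEEP_INTERVAL=${SLEEP_INTERVAL:-4m}
  while :; do
    echo 'Creating DB backup...'
    /start.sh
    sleep "$SLEEP_INTERVAL"
  done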

compose/full-history.yml

Lines changed: 2 additions & 3 deletions
@@ -13,11 +13,10 @@ services:
     command: >
       /bin/bash -c "
       echo 'Set cron job for full history PBF file, every 4 minutes';
-      while :; do sleep 4m; echo 'Creating full history PBF file...'; /start.sh; done;
+      while :; do echo 'Creating full history PBF file...'; /start.sh; sleep 4m; done;
       "
     env_file:
       - ../envs/.env.db
       - ../envs/.env.db-utils
       - ../envs/.env.cloudprovider
-    depends_on:
-      - db
+

compose/planet-dump.yml

Lines changed: 2 additions & 3 deletions
@@ -21,11 +21,10 @@ services:
     command: >
       /bin/bash -c "
       echo 'Set cronjob for planet-dump, every 4 minutes';
-      while :; do sleep 4m; echo 'Creating the planet dump file...'; /start.sh; done;
+      while :; do echo 'Creating the planet dump file...'; /start.sh; sleep 4m; done;
       "
     env_file:
       - ../envs/.env.db
       - ../envs/.env.db-utils
       - ../envs/.env.cloudprovider
-    depends_on:
-      - db
+

compose/replication.yml

Lines changed: 1 addition & 2 deletions
@@ -21,5 +21,4 @@ services:
       - ../envs/.env.db
       - ../envs/.env.db-utils
       - ../envs/.env.cloudprovider
-    depends_on:
-      - db
+
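
All four job files above also drop their depends_on entry for the db service, so each one can be composed without declaring the database in the same file. A minimal usage sketch, assuming the database service lives in a base docker-compose.yml at the repository root (file name assumed, it is not part of this diff):

  # Combine the base file (which provides the db service) with one job file
  docker-compose -f docker-compose.yml -f compose/db-backup-restore.yml up -d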

envs/.env.cloudprovider.example

Lines changed: 9 additions & 3 deletions
@@ -3,13 +3,19 @@
 # It depends on CLOUD PROVIDER we will choose options are: "aws", "gcp", "local"
 #######################################
 
-# CLOUDPROVIDER=("aws", "gcp", "local")
+# CLOUDPROVIDER=("aws", "gcp", "azure" or "local")
 CLOUDPROVIDER=local
 
 # AWS
 AWS_ACCESS_KEY_ID=xxx.xxx
 AWS_SECRET_ACCESS_KEY=yyy.yy
-AWS_S3_BUCKET=s3://osmseed-staging
+AWS_S3_BUCKET=osmseed-staging
 
 # GCP
-GCP_STORAGE_BUCKET=gs://osm-seed
+GCP_STORAGE_BUCKET=osm-seed
+
+# Azure
+AZURE_STORAGE_ACCOUNT=osmseed
+AZURE_CONTAINER_NAME=osm-seed
+AZURE_STORAGE_ACCESS_KEY=...
+AZURE_STORAGE_CONNECTION_STRING=...
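
The Azure block mirrors the AWS and GCP settings: the scripts upload into the $AZURE_CONTAINER_NAME container of the $AZURE_STORAGE_ACCOUNT account. As far as this diff shows, the container itself is not created by the scripts, so a one-time setup step with the Azure CLI could look like this (a sketch, assuming the connection string from this file is exported in the shell):

  # One-time setup: create the target blob container
  az storage container create \
    --name "$AZURE_CONTAINER_NAME" \
    --connection-string "$AZURE_STORAGE_CONNECTION_STRING" \
    --output table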

envs/.env.db-utils.example

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ OVERWRITE_PLANET_FILE=false
 #######################################
 
 # TODO:update start.sh and helm templates, for making this customizable
-REPLICATION_FOLDER=/replication/minute
+REPLICATION_FOLDER=replication/minute
 
 #######################################
 # Environment variables for minute Replication job settings
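
Dropping the leading slash makes REPLICATION_FOLDER a relative path, which presumably keeps it usable as an object-key prefix without producing a double slash when joined to a bucket URL. A quick illustration (osmseed-staging is the example bucket name from .env.cloudprovider.example):

  # Old value: the joined key contains '//'
  REPLICATION_FOLDER=/replication/minute
  echo "s3://osmseed-staging/$REPLICATION_FOLDER/state.txt"   # s3://osmseed-staging//replication/minute/state.txt
  # New value: the key is clean
  REPLICATION_FOLDER=replication/minute
  echo "s3://osmseed-staging/$REPLICATION_FOLDER/state.txt"   # s3://osmseed-staging/replication/minute/state.txt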

images/backup-restore/Dockerfile

Lines changed: 6 additions & 1 deletion
@@ -4,11 +4,16 @@ RUN apt-get install -y \
     curl \
     postgresql-client
 
-# Install AWS and GCP cli
+# Install AWS CLI
 RUN pip install awscli
+
+# Install GCP CLI
 RUN curl -sSL https://sdk.cloud.google.com | bash
 RUN ln -f -s /root/google-cloud-sdk/bin/gsutil /usr/bin/gsutil
 
+# Install Azure CLI
+RUN curl -sL https://aka.ms/InstallAzureCLIDeb | bash
+
 VOLUME /mnt/data
 COPY ./start.sh /
 CMD /start.sh
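
A quick way to confirm that all three CLIs ended up in the image (the image tag here is only illustrative):

  docker build -t osmseed-backup-restore images/backup-restore/
  docker run --rm osmseed-backup-restore aws --version
  docker run --rm osmseed-backup-restore gsutil version
  docker run --rm osmseed-backup-restore az --version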

images/backup-restore/start.sh

Lines changed: 32 additions & 13 deletions
@@ -3,32 +3,51 @@ export PGPASSWORD=$POSTGRES_PASSWORD
 export VOLUME_DIR=/mnt/data
 
 date=$(date '+%y%m%d_%H%M')
-backupFile=$VOLUME_DIR/osmseed-db-${date}.sql.gz
+local_backupFile=$VOLUME_DIR/osmseed-db-${date}.sql.gz
+cloud_backupFile=database/osmseed-db-${date}.sql.gz
 stateFile=$VOLUME_DIR/state.txt
 restoreFile=$VOLUME_DIR/backup.sql.gz
 
 echo "Start...$DB_ACTION action"
 # Backing up DataBase
 if [ "$DB_ACTION" == "backup" ]; then
   # Backup database and make maximum compression at the slowest speed
-  pg_dump -h $POSTGRES_HOST -U $POSTGRES_USER $POSTGRES_DB | gzip -9 >$backupFile
+  pg_dump -h $POSTGRES_HOST -U $POSTGRES_USER $POSTGRES_DB | gzip -9 >$local_backupFile
 
   # AWS
   if [ "$CLOUDPROVIDER" == "aws" ]; then
-    # Upload to S3
-    aws s3 cp $backupFile $AWS_S3_BUCKET/database/$backupFile
-    # The file state.txt contain the latest version of DB path
-    echo "$AWS_S3_BUCKET/database/$backupFile" > $stateFile
-    aws s3 cp $stateFile $AWS_S3_BUCKET/database/$stateFile
+    echo "$AWS_S3_BUCKET/$cloud_backupFile" > $stateFile
+    # Upload db backup file
+    aws s3 cp $local_backupFile $AWS_S3_BUCKET/$cloud_backupFile
+    # Upload state.txt file
+    aws s3 cp $stateFile $AWS_S3_BUCKET/database/state.txt
   fi
 
-  # Google Storage
+  # GCP
   if [ "$CLOUDPROVIDER" == "gcp" ]; then
-    # Upload to GS
-    gsutil cp $backupFile $GCP_STORAGE_BUCKET/database/$backupFile
-    # The file state.txt contain the latest version of DB path
-    echo "$GCP_STORAGE_BUCKET/database/$backupFile" >$stateFile
-    gsutil cp $stateFile $GCP_STORAGE_BUCKET/database/$stateFile
+    echo "$GCP_STORAGE_BUCKET/$cloud_backupFile" > $stateFile
+    # Upload db backup file
+    gsutil cp $local_backupFile $GCP_STORAGE_BUCKET/$cloud_backupFile
+    # Upload state.txt file
+    gsutil cp $stateFile $GCP_STORAGE_BUCKET/database/state.txt
+  fi
+
+  # Azure
+  if [ "$CLOUDPROVIDER" == "azure" ]; then
+    # Save the path file
+    echo "blob://$AZURE_STORAGE_ACCOUNT/$AZURE_CONTAINER_NAME/$cloud_backupFile" > $stateFile
+    # Upload db backup file
+    az storage blob upload \
+      --container-name $AZURE_CONTAINER_NAME \
+      --file $local_backupFile \
+      --name $cloud_backupFile \
+      --output table
+    # Upload state.txt file
+    az storage blob upload \
+      --container-name $AZURE_CONTAINER_NAME \
+      --file $stateFile \
+      --name database/state.txt \
+      --output table
   fi
 fi
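
The restore branch of this script is untouched here, so the Azure download side is not shown in the diff. A rough sketch of what the counterpart could look like with the same variables (hypothetical helper, not part of this commit; blobName is an illustrative variable):

  # Hypothetical restore helper: read the recorded blob path from state.txt,
  # then download that backup for the restore branch
  az storage blob download \
    --container-name $AZURE_CONTAINER_NAME \
    --name database/state.txt \
    --file $stateFile \
    --output table
  blobName=$(sed "s|blob://$AZURE_STORAGE_ACCOUNT/$AZURE_CONTAINER_NAME/||" $stateFile)
  az storage blob download \
    --container-name $AZURE_CONTAINER_NAME \
    --name "$blobName" \
    --file $restoreFile \
    --output table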

images/full-history/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-FROM developmentseed/osmseed-osm-processor:0.1.0-n487.hbce9469
+FROM developmentseed/osmseed-osm-processor:v2
 
 VOLUME /mnt/data
 COPY ./start.sh /

images/full-history/start.sh

Lines changed: 35 additions & 12 deletions
@@ -11,10 +11,13 @@ fi
 
 # Fixing name for historical file
 date=$(date '+%y%m%d_%H%M')
-fullHistoryFile=$VOLUME_DIR/history-${date}.osh.pbf
+local_fullHistoryFile=$VOLUME_DIR/history-${date}.osh.pbf
+cloud_fullHistoryFile=planet/full-history/history-${date}.osh.pbf
+
 # In case overwrite the file
 if [ "$OVERWRITE_FHISTORY_FILE" == "true" ]; then
-  fullHistoryFile=$VOLUME_DIR/history-latest.osh.pbf
+  local_fullHistoryFile=$VOLUME_DIR/history-latest.osh.pbf
+  cloud_fullHistoryFile=planet/full-history/history-latest.osh.pbf
 fi
 
 # State file nname
@@ -34,25 +37,45 @@ osmosis --read-apidb-change \
   $osm_tmp_file
 
 # Convert file to PBF file
-osmium cat $osm_tmp_file -o $fullHistoryFile
-osmium fileinfo $fullHistoryFile
+osmium cat $osm_tmp_file -o $local_fullHistoryFile
+osmium fileinfo $local_fullHistoryFile
 
 # Remove full-hitory osm file, keep only history-latest.osh.pbf files
 rm $osm_tmp_file
 
 # AWS
 if [ $CLOUDPROVIDER == "aws" ]; then
   AWS_URL=${AWS_S3_BUCKET/s3:\/\//http:\/\/}
-  echo "$AWS_URL.s3.amazonaws.com/planet/full-history/$fullHistoryFile" >$stateFile
-  # Upload to S3
-  aws s3 cp $fullHistoryFile $AWS_S3_BUCKET/planet/full-history/$fullHistoryFile --acl public-read
-  aws s3 cp $stateFile $AWS_S3_BUCKET/planet/full-history/$stateFile --acl public-read
+  echo "$AWS_URL.s3.amazonaws.com/$cloud_fullHistoryFile" >$stateFile
+  # Upload history-planet.osm.pbf
+  aws s3 cp $local_fullHistoryFile $AWS_S3_BUCKET/$cloud_fullHistoryFile --acl public-read
+  # Upload state.txt
+  aws s3 cp $stateFile $AWS_S3_BUCKET/planet/full-history/state.txt --acl public-read
 fi
 
 # Google Storage
 if [ $CLOUDPROVIDER == "gcp" ]; then
-  echo "https://storage.cloud.google.com/$GCP_STORAGE_BUCKET/planet/full-history/$fullHistoryFile" >$stateFile
-  # Upload to GS
-  gsutil cp -a public-read $fullHistoryFile $GCP_STORAGE_BUCKET/planet/full-history/$fullHistoryFile
-  gsutil cp -a public-read $stateFile $GCP_STORAGE_BUCKET/planet/full-history/$stateFile
+  echo "https://storage.cloud.google.com/$GCP_STORAGE_BUCKET/$cloud_fullHistoryFile" >$stateFile
+  # Upload history-planet.osm.pbf
+  gsutil cp -a public-read $local_fullHistoryFile $GCP_STORAGE_BUCKET/$cloud_fullHistoryFile
+  # Upload state.txt
+  gsutil cp -a public-read $stateFile $GCP_STORAGE_BUCKET/planet/full-history/state.txt
+fi
+
+# Azure
+if [ $CLOUDPROVIDER == "azure" ]; then
+  # Save the path file
+  echo "https://$AZURE_STORAGE_ACCOUNT.blob.core.windows.net/$AZURE_CONTAINER_NAME/$cloud_fullHistoryFile" >$stateFile
+  # Upload history-planet.osm.pbf
+  az storage blob upload \
+    --container-name $AZURE_CONTAINER_NAME \
+    --file $local_fullHistoryFile \
+    --name $cloud_fullHistoryFile \
+    --output table
+  # Upload state.txt
+  az storage blob upload \
+    --container-name $AZURE_CONTAINER_NAME \
+    --file $stateFile \
+    --name planet/full-history/state.txt \
+    --output table
 fi
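
The Azure branch writes an HTTPS blob URL into state.txt, matching what the AWS and GCP branches publish. A sketch of how a consumer could fetch the latest full-history file from it, assuming the container allows anonymous blob reads (the AWS/GCP uploads are made public explicitly with --acl public-read / -a public-read; for Azure that would be a container-level setting, which this diff does not configure) and using the example account/container names from .env.cloudprovider.example:

  # Read the recorded URL, then download the full-history file it points to
  latest_url=$(curl -s https://osmseed.blob.core.windows.net/osm-seed/planet/full-history/state.txt)
  curl -sS -O "$latest_url"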
