4 changes: 4 additions & 0 deletions 02_lakefs_enterprise/README.md
@@ -21,6 +21,10 @@ Login to [Treeverse Dockerhub](https://hub.docker.com/u/treeverse) by using the
docker login -u externallakefs
```

If you want to use the lakeFS [Multiple Storage Backends](https://docs.lakefs.io/latest/howto/multiple-storage-backends/) feature, edit the "lakeFS-samples/02_lakefs_enterprise/docker-compose.yml" file to update the credentials for AWS S3 and/or Azure Blob Storage. If you want to use Google Cloud Storage (GCS), copy your GCP Service Account key JSON file to the "lakeFS-samples/02_lakefs_enterprise" folder and update the file name in the Docker Compose file (see the example below). Refer to the [Multiple Storage Backends documentation](https://docs.lakefs.io/latest/howto/multiple-storage-backends/) for additional information.

If you do NOT want to use the lakeFS Multiple Storage Backends feature, leave the Docker Compose file unchanged.
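
For example, a minimal sketch of the GCS step, assuming your downloaded key is named "my-gcp-key.json" (a hypothetical name) and that you keep the placeholder file name referenced by `credentials_file` in the Docker Compose file:

```bash
# Hypothetical example: copy your GCP Service Account key into the samples folder.
# The target file name must match the "credentials_file" entry in docker-compose.yml
# (the sample ships with gcp_service_account_key_file_name.json as a placeholder).
cp ~/Downloads/my-gcp-key.json lakeFS-samples/02_lakefs_enterprise/gcp_service_account_key_file_name.json
```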

Copy the lakeFS license file to the "lakeFS-samples/02_lakefs_enterprise" folder, then update the lakeFS license file name and installation ID in the following command, and run it to provision a lakeFS Enterprise server, MinIO for your object store, and Jupyter:

```bash
99 changes: 93 additions & 6 deletions 02_lakefs_enterprise/docker-compose.yml
@@ -25,11 +25,98 @@ services:
minio-setup:
condition: service_completed_successfully
environment:
- LAKEFS_BLOCKSTORE_TYPE=s3
- LAKEFS_BLOCKSTORE_S3_FORCE_PATH_STYLE=true
- LAKEFS_BLOCKSTORE_S3_ENDPOINT=http://minio:9000
- LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID=minioadmin
- LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY=minioadmin
- LAKEFS_BLOCKSTORES_SIGNING_SECRET_KEY=some random secret string
- |
LAKEFS_BLOCKSTORES_STORES=
[
{
"id":"minio",
"backward_compatible":true,
"description":"Primary on-prem MinIO storage for lakeFS Samples",
"type":"s3",
"s3":
{
"endpoint":"http://minio:9000",
"force_path_style":true,
"credentials":
{
"access_key_id":"minioadmin",
"secret_access_key":"minioadmin"
}
}
},
{
"id":"local",
"description":"POSIX complaint Local storage",
"type":"local",
"local":
{
"path":"/tmp/local1",
"import_enabled":true
}
},
{
"id":"s3-us-east-1",
"description":"AWS S3 storage for production data",
"type":"s3",
"s3":
{
"region":"us-east-1",
"credentials":
{
"access_key_id":"AWS access key",
"secret_access_key":"AWS secret key"
}
}
},
{
"id":"s3-us-west-2",
"description":"AWS S3 storage for development data",
"type":"s3",
"s3":
{
"region":"us-west-2",
"credentials":
{
"access_key_id":"AWS access key",
"secret_access_key":"AWS secret key"
}
}
},
{
"id":"s3-eu-west-3",
"description":"AWS S3 storage for data for European customers",
"type":"s3",
"s3":
{
"region":"eu-west-3",
"credentials":
{
"access_key_id":"AWS access key",
"secret_access_key":"AWS secret key"
}
}
},
{
"id":"azure-analytics",
"description":"Azure Blob storage for analytics data",
"type":"azure",
"azure":
{
"storage_account":"Azure storage account name",
"storage_access_key":"Azure storage account access key"
}
},
{
"id":"gcs-dev",
"description":"Google Cloud Storage for development data",
"type":"gs",
"gs":
{
"credentials_file":"/tmp/lakefs/gcp_service_account_key_file_name.json"
}
}
]
- LAKEFS_AUTH_ENCRYPT_SECRET_KEY=some random secret string
- LAKEFS_LOGGING_LEVEL=INFO
- LAKEFS_STATS_ENABLED=${LAKEFS_STATS_ENABLED:-1}
@@ -50,7 +137,7 @@ services:
lakefs setup --user-name everything-bagel --access-key-id "$$LAKECTL_CREDENTIALS_ACCESS_KEY_ID" --secret-access-key "$$LAKECTL_CREDENTIALS_SECRET_ACCESS_KEY" || true
lakefs run &
echo "---- Creating repository ----"
wait-for -t 60 lakefs:8000 -- lakectl repo create lakefs://quickstart s3://quickstart --sample-data || true
wait-for -t 60 lakefs:8000 -- lakectl repo create lakefs://quickstart s3://quickstart --storage-id minio --sample-data || true
echo ""
wait-for -t 60 minio:9000 && echo '------------------------------------------------

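Once multiple backends are configured, `lakectl repo create` accepts a `--storage-id` flag (used above for the quickstart repository) to choose which backend a repository's storage namespace lives on. A hedged sketch, using the storage IDs defined in the Compose file above and a hypothetical bucket name:

```bash
# The sample setup creates the quickstart repository on the MinIO backend (id "minio").
lakectl repo create lakefs://quickstart s3://quickstart --storage-id minio
# Additional repositories can target other configured backends; the bucket below is a
# hypothetical placeholder.
lakectl repo create lakefs://prod-repo s3://my-prod-bucket/prefix --storage-id s3-us-east-1
```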
12 changes: 12 additions & 0 deletions 02_lakefs_enterprise/gcp_service_account_key_file_name.json
@@ -0,0 +1,12 @@
{
"type": "service_account",
"project_id": "test",
"private_key_id": "123",
"private_key": "-----BEGIN PRIVATE KEY-----\ngfgf\n-----END PRIVATE KEY-----\n",
"client_email": "[email protected]",
"client_id": "123",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/test%test.iam.gserviceaccount.com"
}