diff --git a/multicloud/azure-cli/README.md b/multicloud/azure-cli/README.md
index 37da994..4d867f1 100644
--- a/multicloud/azure-cli/README.md
+++ b/multicloud/azure-cli/README.md
@@ -1,7 +1,7 @@
# Oracle Database@Azure: Create an Autonomous Database
There are different ways that you can deploy a new Oracle Autonomous Database:
* [Using the Azure Portal](https://youtu.be/QOCvRr5CfeQ)
-* [Using Terraform scripts](https://github.com/oci-landing-zones/terraform-oci-multicloud-azure/tree/main)
+* [Using Terraform scripts](https://github.com/oci-landing-zones/terraform-oci-multicloud-azure)
* Using the Azure CLI
The steps below show how to create an Autonomous Database using the Azure CLI.
@@ -29,6 +29,8 @@ You can run the scripts independently or run `create-all-resources.sh`. Simply u
|[create-all-resources.sh](create-all-resources.sh)|Creates your resource group, network, ADB and VM|
|[create-data-lake-storage.sh](create-data-lake-storage.sh)|Creates an Azure Data Lake Gen 2 storage account, a container and uploads sample data into that container|
|[delete-all-resources.sh](delete-all-resources.sh)|Deletes your resource group, network, ADB and VM|
+|[show-adb-info.sh](show-adb-info.sh)|Shows information about your ADB - including your JDBC connection details to the HIGH service|
+|[show-data-lake-storage-info.sh](show-data-lake-storage-info.sh)|Shows information about your data lake storage - including the storage endpoint URL|
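+
+For example, after the resources are created you can print the ADB connection details and the storage endpoint URL with:
+
+```bash
+./show-adb-info.sh
+./show-data-lake-storage-info.sh
+```
+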
### Configuration file
The Azure cli deployment scripts rely on settings found in the config file. These resources **will be created** by the scripts. Update the config file prior to running any of the scripts.
@@ -80,6 +82,8 @@ Connect to your Autonomous Database!
* Use these great VS Code extensions that help you develop and debug your database apps:
* SQL Developer for VS Code ([Learn More](https://www.oracle.com/database/sqldeveloper/vscode/) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.sql-developer))
* Oracle Developer Tools for VS Code ([Learn More](https://docs.oracle.com/en/database/oracle/developer-tools-for-vscode/getting-started/gettingstarted.html) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.oracledevtools))
+* [Use the sample scripts](../../sql/README.md) to learn how to use different features - like Select AI, data lake integration, JSON, and more.
+
#### JDBC Example:
JDBC is a common way to connect to Autonomous Database. For example, you can use the **Custom JDBC URL** in the VS Code SQL Developer Extension:
diff --git a/multicloud/azure-cli/config b/multicloud/azure-cli/config
index 1a8375e..9b51423 100644
--- a/multicloud/azure-cli/config
+++ b/multicloud/azure-cli/config
@@ -2,38 +2,49 @@
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
# update the values below to match your requirements
-LOCATION="eastus"
-RESOURCE_GROUP="development"
+# Region and resource group where resources are deployed
+# example: eastus
+LOCATION=""
+RESOURCE_GROUP=""
ADB_NAME="quickstart"
## NETWORKING
# database
-VNET_NAME="dev-vnet"
-VNET_PREFIX="19x.xxx.0.0/16"
+# example: dev-vnet
+VNET_NAME=""
+# example: 192.168.0.0/16
+VNET_PREFIX=""
# subnet for the database
-SUBNET_NAME="dev-sn-db"
-SUBNET_PREFIX="19x.xxx.1.0/24"
+# example: dev-sn-db
+SUBNET_NAME=""
+# example: 192.168.1.0/24
+SUBNET_PREFIX=""
# client subnet
-SUBNET2_NAME="dev-sn-client"
-SUBNET2_PREFIX="19x.xxx.2.0/24"
+# example: dev-sn-client
+SUBNET2_NAME=""
+# example: 192.168.2.0/24
+SUBNET2_PREFIX=""
#network security group
NSG_NAME=$SUBNET2_NAME-nsg
## COMPUTE VM
-VM_NAME="adb-vm-client"
+# example: adb-vm-client
+VM_NAME=""
VM_PREFERRED_SIZES=( "Standard_DS3_v2" "Standard_DC1s_v2" "Standard_DC2s_v2" "Standard_DC2ads_v5" "Standard_L4s" )
VM_IMAGE="MicrosoftWindowsDesktop:Windows-11:win11-22h2-pro:latest"
## CLOUD STORAGE
# Storage accounts require a unique name across azure. Enter your unique name below.
-STORAGE_ACCOUNT_NAME="your-storage-account"
+# example: devadbstorageacct
+STORAGE_ACCOUNT_NAME=""
STORAGE_CONTAINER_NAME="adb-sample"
## IDENTITIES
# This identity will be used for your VM. The password will also be used for the database ADMIN user
USER_NAME="adb"
---The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+# example: watchS0meMovies#
USER_PASSWORD=""
\ No newline at end of file
diff --git a/multicloud/azure-cli/config.default b/multicloud/azure-cli/config.default
index 1a8375e..9b51423 100644
--- a/multicloud/azure-cli/config.default
+++ b/multicloud/azure-cli/config.default
@@ -2,38 +2,49 @@
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
# update the values below to match your requirements
-LOCATION="eastus"
-RESOURCE_GROUP="development"
+# Region and resource group where resources are deployed
+# example: eastus
+LOCATION=""
+RESOURCE_GROUP=""
ADB_NAME="quickstart"
## NETWORKING
# database
-VNET_NAME="dev-vnet"
-VNET_PREFIX="19x.xxx.0.0/16"
+# example: dev-vnet
+VNET_NAME=""
+# example: 192.168.0.0/16
+VNET_PREFIX=""
# subnet for the database
-SUBNET_NAME="dev-sn-db"
-SUBNET_PREFIX="19x.xxx.1.0/24"
+# example: dev-sn-db
+SUBNET_NAME=""
+# example: 192.168.1.0/24
+SUBNET_PREFIX=""
# client subnet
-SUBNET2_NAME="dev-sn-client"
-SUBNET2_PREFIX="19x.xxx.2.0/24"
+# example: dev-sn-client
+SUBNET2_NAME=""
+# example: 192.168.2.0/24
+SUBNET2_PREFIX=""
#network security group
NSG_NAME=$SUBNET2_NAME-nsg
## COMPUTE VM
-VM_NAME="adb-vm-client"
+# example: adb-vm-client
+VM_NAME=""
VM_PREFERRED_SIZES=( "Standard_DS3_v2" "Standard_DC1s_v2" "Standard_DC2s_v2" "Standard_DC2ads_v5" "Standard_L4s" )
VM_IMAGE="MicrosoftWindowsDesktop:Windows-11:win11-22h2-pro:latest"
## CLOUD STORAGE
# Storage accounts require a unique name across azure. Enter your unique name below.
-STORAGE_ACCOUNT_NAME="your-storage-account"
+# example: devadbstorageacct
+STORAGE_ACCOUNT_NAME=""
STORAGE_CONTAINER_NAME="adb-sample"
## IDENTITIES
# This identity will be used for your VM. The password will also be used for the database ADMIN user
USER_NAME="adb"
---The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+# example: watchS0meMovies#
USER_PASSWORD=""
\ No newline at end of file
diff --git a/multicloud/azure-cli/create-data-lake-storage.sh b/multicloud/azure-cli/create-data-lake-storage.sh
index 2771cc4..e128417 100755
--- a/multicloud/azure-cli/create-data-lake-storage.sh
+++ b/multicloud/azure-cli/create-data-lake-storage.sh
@@ -62,7 +62,7 @@ echo "Upload files to that directory"
az storage fs directory upload \
--account-name $STORAGE_ACCOUNT_NAME \
--file-system $STORAGE_CONTAINER_NAME \
- --source "../sql/support-site/*" \
+ --source "../../sql/support-site/*" \
--destination-path support-site \
--recursive \
--auth-mode login
@@ -78,7 +78,7 @@ echo "Upload files to that directory"
az storage fs directory upload \
--account-name $STORAGE_ACCOUNT_NAME \
--file-system $STORAGE_CONTAINER_NAME \
- --source "../sql/data/*" \
+ --source "../../sql/data/*" \
--destination-path data \
--recursive \
--auth-mode login
@@ -99,8 +99,10 @@ az storage account keys list \
--resource-group $RESOURCE_GROUP --query "[0].value" -o tsv
echo "Storage URL:"
-az storage account show \
+STORAGE_URL=$(az storage account show \
--name $STORAGE_ACCOUNT_NAME \
--query primaryEndpoints.blob \
- --output tsv
+ --output tsv)
+echo $STORAGE_URL$STORAGE_CONTAINER_NAME
+
echo ""
diff --git a/multicloud/azure-cli/show-data-lake-storage-info.sh b/multicloud/azure-cli/show-data-lake-storage-info.sh
index 7e53dfa..bef69fa 100755
--- a/multicloud/azure-cli/show-data-lake-storage-info.sh
+++ b/multicloud/azure-cli/show-data-lake-storage-info.sh
@@ -15,9 +15,10 @@ az storage account keys list \
--account-name $STORAGE_ACCOUNT_NAME \
--resource-group $RESOURCE_GROUP --query "[0].value" -o tsv
echo "Storage URL:"
-az storage account show \
+STORAGE_URL=$(az storage account show \
--name $STORAGE_ACCOUNT_NAME \
--query primaryEndpoints.blob \
- --output tsv
+ --output tsv)
+echo $STORAGE_URL$STORAGE_CONTAINER_NAME
echo ""
diff --git a/multicloud/gcloud-cli/README.md b/multicloud/gcloud-cli/README.md
new file mode 100644
index 0000000..bfe057e
--- /dev/null
+++ b/multicloud/gcloud-cli/README.md
@@ -0,0 +1,105 @@
+# Oracle Database@Google Cloud: Create an Autonomous Database
+There are different ways that you can deploy a new Oracle Autonomous Database:
+* [Using the Google Cloud Portal](https://docs.oracle.com/en-us/iaas/ogadb/ogadb-provisioning-autonomous-database.html)
+* [Using Terraform scripts](https://github.com/oci-landing-zones/terraform-oci-multicloud-azure)
+* Using the Google Command Line Interface (gcloud CLI)
+
+
+The steps below show how to create an Autonomous Database using the gcloud CLI.
+
+## Prerequisites:
+* [Install the gcloud CLI](https://cloud.google.com/sdk/docs/install)
+* [Onboard Oracle Database@Google Cloud](https://docs.oracle.com/en-us/iaas/Content/database-at-gcp/oagcp-onboard.htm#oagcp_onboard). Onboarding includes subscribing to the service using a Pay as You Go or private offer, setting up permissions, and more.
+
+## Deploy your Autonomous Database and your infrastructure
+The gcloud CLI scripts will deploy the following infrastructure:
+* A VPC Network with a client subnet
+* An Oracle Autonomous Database. It is deployed to a private subnet on that VPC Network. That private subnet is managed by Oracle Database@Google Cloud.
+* A Windows-based Virtual Machine is deployed to the client subnet. You can RDP to that VM to develop your apps and access Autonomous Database.
+* A Cloud Storage bucket with sample data
+* Sample code will use Google Gemini. Ensure the API is enabled.
+
+
+
+
+**Note:** Gemini is used by the samples - but the scripts do not set up access to the resource. See the [Generative AI on Vertex Quickstart](https://cloud.google.com/vertex-ai/generative-ai/docs/start/quickstarts/quickstart-multimodal?authuser=1)
+
+You can run the scripts independently or run `create-all-resources.sh`. Simply update the [`config`](#configuration-file) prior to running the scripts:
+
+|Script|Description|
+|----|---|
+|[create-network.sh](create-network.sh)|Creates a VPC Network and subnet with required firewall rules. ADB is accessed through a private endpoint on this network. The VM is deployed to this network and can be used to work with ADB.|
+|[create-adb.sh](create-adb.sh)|Create an Autonomous Database|
+|[create-compute-vm.sh](create-compute-vm.sh)|Create a VM in that VPC. By default, a Windows VM is created and can be accessed via RDP. After running this script, you can set up the password by running: `source config` and then `gcloud compute reset-windows-password $VM_NAME --zone=$REGION-a`|
+|[create-all-resources.sh](create-all-resources.sh)|Creates your network, ADB, VM and Cloud Storage bucket.|
+|[create-data-lake-storage.sh](create-data-lake-storage.sh)|Creates a bucket on Cloud Storage and uploads sample data into that bucket|
+|[delete-all-resources.sh](delete-all-resources.sh)|Deletes your ADB, Cloud Storage bucket and VM (the network is not deleted)|
+|[show-adb-info.sh](show-adb-info.sh)|Shows information about your ADB - including your JDBC connection details to the HIGH service|
+|[show-data-lake-storage-info.sh](show-data-lake-storage-info.sh)|Shows information about your data lake storage - including the storage endpoint URL|
+
+### Configuration file
+The gcloud CLI deployment scripts rely on settings found in the config file. These resources **will be created** by the scripts. Update the config file prior to running any of the scripts.
+
+>**IMPORTANT:** This file will contain a password that is used to connect to Autonomous Database and the virtual machine. Set the file's permissions so that only the file's owner can view its contents:
+```bash
+chmod 600 config
+```
+
+|Setting|Description|Example|
+|----|----|----|
+|REGION|Region where resources will be deployed. [See documentation](https://docs.oracle.com/en-us/iaas/Content/database-at-gcp/oagcp-regions.htm) for region availability|"us-east4"|
+|PROJECT|Target Google Cloud project for new resources|"development"|
+|USER_PASSWORD|The password for the Autonomous Database admin user|"watchS0meMovies#"|
+|ADB_NAME|Autonomous Database name. This name must be unique within a region location|"quickstart"|
+|SUBNET_DB_IP_RANGE|IP address range used for ADB. It cannot overlap with the client subnet range, but it may overlap with ranges used by other ADB instances.|"192.168.11.0/24"|
+|VPC_NETWORK_NAME|Name of the VPC Network|"dev-network"|
+|VNET_PREFIX|CIDR range for the virtual network|"192.168.0.0/16"|
+|SUBNET_CLIENT_NAME|Name of the client subnet where the VM is deployed|"dev-sn-client"|
+|SUBNET_CLIENT_IP_RANGE|CIDR range for the client subnet|"192.168.10.0/24"|
+|VM_NAME|Name of the virtual machine|"dev-vm-client"|
+|VM_IMAGE_FAMILY|The image deployed to the VM |"windows-2022"|
+|VM_MACHINE_TYPE|The type of VM deployed|"e2-standard-4"|
+|BUCKET_NAME|The name of the cloud storage bucket where sample files will be uploaded.|"adb-sample-quickstart"|
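+
+For reference, a filled-in `config` using the example values above might look like the following sketch (substitute your own project, names, and password):
+
+```bash
+REGION="us-east4"
+PROJECT="development"
+USER_PASSWORD="watchS0meMovies#"
+ADB_NAME="quickstart"
+SUBNET_DB_IP_RANGE="192.168.11.0/24"
+VPC_NETWORK_NAME="dev-network"
+SUBNET_CLIENT_NAME="dev-sn-client"
+SUBNET_CLIENT_IP_RANGE="192.168.10.0/24"
+VM_NAME="dev-vm-client"
+VM_IMAGE_FAMILY="windows-2022"
+VM_MACHINE_TYPE="e2-standard-4"
+BUCKET_NAME="adb-sample-quickstart"
+```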
+
+
+### Using the scripts
+Make sure that you have enabled APIs for your project. [See the documentation](https://cloud.google.com/endpoints/docs/openapi/enable-api) for details.
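+
+For example, an API can be enabled from the CLI. The service name below is an assumption used for illustration; check the documentation above for the exact APIs your project needs:
+
+```bash
+# Illustrative only - verify the actual service name in the Oracle Database@Google Cloud documentation
+gcloud services enable oracledatabase.googleapis.com
+```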
+
+Log into Google Cloud from the CLI:
+```bash
+gcloud auth login
+```
+
+Update the config file.
+Then, run your scripts. The following will deploy a complete environment, but you can also create individual components. Just make sure you create dependencies first (e.g. the VPC network prior to Autonomous Database):
+
+Creating all of the resources will take approximately 15-20 minutes.
+
+```bash
+./create-all-resources.sh
+```
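+
+Alternatively, you can create the components one at a time, in the same dependency order that `create-all-resources.sh` uses:
+
+```bash
+./create-network.sh
+./create-adb.sh
+./create-data-lake-storage.sh
+./create-compute-vm.sh
+```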
+
+Check for errors after running the script. For example, VM availability can impact the success of creating the resource. If there is an issue, simply rerun the script that creates the resource (note: you may need to update the config file).
+
+## What's next
+Connect to your Autonomous Database!
+* [Learn about connectivity options](https://docs.oracle.com/en/cloud/paas/autonomous-database/serverless/adbsb/connect-preparing.html)
+* Use these great VS Code extensions that help you develop and debug your database apps:
+ * SQL Developer for VS Code ([Learn More](https://www.oracle.com/database/sqldeveloper/vscode/) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.sql-developer))
+ * Oracle Developer Tools for VS Code ([Learn More](https://docs.oracle.com/en/database/oracle/developer-tools-for-vscode/getting-started/gettingstarted.html) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.oracledevtools))
+* [Use the sample scripts](../../sql/README.md) to learn how to use different features - like Select AI, data lake integration, JSON, and more.
+
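+For example, once the sample scripts have created a Select AI profile (they use the profile name `genai`), you can chat with the model directly from SQL. This is a minimal sketch; it assumes the profile and its credential already exist:
+
+```sql
+-- assumes an AI profile named 'genai' has already been created by the sample scripts
+SELECT DBMS_CLOUD_AI.GENERATE(
+         prompt       => 'In one sentence, what is Oracle Autonomous Database?',
+         profile_name => 'genai',
+         action       => 'chat') AS answer
+FROM dual;
+```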
+
+#### JDBC Example:
+JDBC is a common way to connect to Autonomous Database. For example, you can use the **Custom JDBC URL** in the VS Code SQL Developer Extension:
+ 
+
+Notice the `jdbc:oracle:thin:@` prefix followed by a connection string. You can find the connection string in different ways.
+
+1. Go to your Autonomous Database in the Google Cloud console and view its connection details:
+ 
+2. Use the gcloud CLI script [`show-adb-info.sh`](./show-adb-info.sh). That script will return information about your Autonomous Database, including connection details.
+
+
+Copyright (c) 2024 Oracle and/or its affiliates.
+Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
diff --git a/multicloud/gcloud-cli/config b/multicloud/gcloud-cli/config
new file mode 100644
index 0000000..207f9bb
--- /dev/null
+++ b/multicloud/gcloud-cli/config
@@ -0,0 +1,46 @@
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+# update the values below to match your requirements
+# example: us-east4
+REGION=""
+PROJECT=""
+
+## IDENTITIES
+# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+# example: watchS0meMovies#
+USER_PASSWORD=""
+
+## ADB properties
+# Database name. This will be used for the display name as well.
+ADB_NAME="quickstart"
+
+# ADB IP range. It cannot overlap with the client subnet range
+# example: 192.168.11.0/24
+SUBNET_DB_IP_RANGE=""
+
+## NETWORKING
+# public client network
+# example: dev-network
+VPC_NETWORK_NAME=""
+
+# names for the firewall rules
+VPC_FIREWALL_INGRESS_NAME="allow-common-ingress-ports"
+VPC_FIREWALL_EGRESS_NAME="allow-client-egress-ports"
+
+# client subnet
+# example: dev-sn-client
+SUBNET_CLIENT_NAME=""
+# example: 192.168.10.0/24
+SUBNET_CLIENT_IP_RANGE=""
+
+## COMPUTE VM
+# example: dev-vm-client
+VM_NAME=""
+VM_IMAGE_FAMILY="windows-2022"
+VM_MACHINE_TYPE="e2-standard-4"
+
+## CLOUD STORAGE
+# Storage buckets require a globally unique name. Enter your unique name below.
+# example: adb-sample-quickstart
+BUCKET_NAME=""
\ No newline at end of file
diff --git a/multicloud/gcloud-cli/config.default b/multicloud/gcloud-cli/config.default
new file mode 100644
index 0000000..4de8d30
--- /dev/null
+++ b/multicloud/gcloud-cli/config.default
@@ -0,0 +1,46 @@
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+# update the values below to match your requirements
+# example: us-east4
+REGION=""
+PROJECT=""
+
+## IDENTITIES
+# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+# example: watchS0meMovies#
+USER_PASSWORD=""
+
+## ADB properties
+# Database name. This will be used for the display name as well.
+ADB_NAME="quickstart"
+
+# ADB IP range. It cannot overlap with the client subnet range
+# example: 192.168.11.0/24
+SUBNET_DB_IP_RANGE=""
+
+## NETWORKING
+# public client network
+# example: dev-network
+VPC_NETWORK_NAME=""
+
+# names for the firewall rules
+VPC_FIREWALL_INGRESS_NAME="allow-common-ingress-ports"
+VPC_FIREWALL_EGRESS_NAME="allow-client-egress-ports"
+
+# client subnet
+# example: dev-sn-client
+SUBNET_CLIENT_NAME=""
+# example: 192.168.10.0/24
+SUBNET_CLIENT_IP_RANGE=""
+
+## COMPUTE VM
+# example: dev-vm-client
+VM_NAME=""
+VM_IMAGE_FAMILY="windows-2022"
+VM_MACHINE_TYPE="e2-standard-4"
+
+## CLOUD STORAGE
+# Storage buckets require a globally unique name. Enter your unique name below.
+# example: adb-sample-quickstart
+BUCKET_NAME=""
\ No newline at end of file
diff --git a/multicloud/gcloud-cli/create-adb.sh b/multicloud/gcloud-cli/create-adb.sh
new file mode 100755
index 0000000..2a92e3a
--- /dev/null
+++ b/multicloud/gcloud-cli/create-adb.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+echo ""
+echo "##"
+echo "# create autonomous database"
+echo "##"
+echo ""
+# ensure you update the config file to match your deployment prior to running the deployment
+
+source ./config
+
+# ADB requires the IDs of the networking components
+ NETWORK_ID=`gcloud compute networks list --filter="name=$VPC_NETWORK_NAME" --format="get(id)"`
+
+gcloud oracle-database autonomous-databases create $ADB_NAME \
+ --location=$REGION \
+ --display-name=$ADB_NAME \
+ --database=$ADB_NAME \
+ --network=$NETWORK_ID \
+ --cidr=$SUBNET_DB_IP_RANGE \
+ --admin-password=$USER_PASSWORD \
+ --properties-compute-count=4 \
+ --properties-data-storage-size-gb=500 \
+ --properties-db-version=23ai \
+ --properties-license-type=LICENSE_INCLUDED \
+ --properties-db-workload=OLTP \
+ --properties-is-storage-auto-scaling-enabled \
+ --properties-is-auto-scaling-enabled \
+ --async
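+
+# Note: --async returns immediately; database provisioning continues in the background.
+# You can check the state later with ./show-adb-info.sh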
+
diff --git a/multicloud/gcloud-cli/create-all-resources.sh b/multicloud/gcloud-cli/create-all-resources.sh
new file mode 100755
index 0000000..c0cbe01
--- /dev/null
+++ b/multicloud/gcloud-cli/create-all-resources.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+set -e
+
+./create-network.sh
+./create-adb.sh
+./create-data-lake-storage.sh
+./create-compute-vm.sh
\ No newline at end of file
diff --git a/multicloud/gcloud-cli/create-compute-vm.sh b/multicloud/gcloud-cli/create-compute-vm.sh
new file mode 100755
index 0000000..6aa6e46
--- /dev/null
+++ b/multicloud/gcloud-cli/create-compute-vm.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+echo ""
+echo "##"
+echo "# create compute vm"
+echo "##"
+echo ""
+
+# ensure you update the config file to match your deployment prior to running the deployment
+source ./config
+
+## Create a windows vm
+echo "Creating compute instance $VM_NAME"
+
+gcloud compute instances create $VM_NAME \
+ --image-family $VM_IMAGE_FAMILY \
+ --image-project windows-cloud \
+ --machine-type e2-standard-4 \
+ --zone $REGION-a \
+ --network $VPC_NETWORK_NAME \
+ --network-tier=PREMIUM \
+ --subnet $SUBNET_CLIENT_NAME \
+ --boot-disk-size 50GB \
+ --boot-disk-type pd-ssd \
+ --enable-display-device \
+ --tags=bastion
+
+## Set its password
+echo "Resetting the password for $VM_NAME"
+gcloud compute reset-windows-password $VM_NAME --zone=$REGION-a
+
diff --git a/multicloud/gcloud-cli/create-data-lake-storage.sh b/multicloud/gcloud-cli/create-data-lake-storage.sh
new file mode 100755
index 0000000..ed3f821
--- /dev/null
+++ b/multicloud/gcloud-cli/create-data-lake-storage.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+echo ""
+echo "##"
+echo "# Create a Google Cloud Storage bucket"
+echo "##"
+echo ""
+# ensure you update the config file to match your deployment prior to running the deployment
+
+source config
+
+echo "# Creating Google Cloud Storage bucket $STORAGE_ACCOUNT_NAME"
+gcloud storage buckets create gs://$BUCKET_NAME --location=$REGION --default-storage-class=STANDARD
+
+echo "# Uploading sample files"
+echo "Support site files"
+gcloud storage cp -r ../../sql/support-site gs://$BUCKET_NAME/support-site
+
+echo "Sample data sets"
+gcloud storage cp -r ../../sql/data gs://$BUCKET_NAME/data
+
+echo "Done."
+gcloud storage ls --long --recursive gs://$BUCKET_NAME
+
+echo ""
+echo "Bucket Name: $BUCKET_NAME"
+gcloud storage hmac list
+echo "Storage URL:"
+echo "https://storage.googleapis.com/$BUCKET_NAME"
diff --git a/multicloud/gcloud-cli/create-network.sh b/multicloud/gcloud-cli/create-network.sh
new file mode 100755
index 0000000..27798cb
--- /dev/null
+++ b/multicloud/gcloud-cli/create-network.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+echo ""
+echo "##"
+echo "# deploy network"
+echo "##"
+echo ""
+# ensure you update the config file to match your deployment prior to running the deployment
+source ./config
+
+# Create a VPC
+gcloud compute networks create $VPC_NETWORK_NAME --subnet-mode=custom
+
+gcloud compute networks subnets create $SUBNET_CLIENT_NAME \
+ --network=$VPC_NETWORK_NAME \
+ --region=$REGION \
+ --range=$SUBNET_CLIENT_IP_RANGE \
+ --enable-flow-logs \
+ --enable-private-ip-google-access
+
+gcloud compute firewall-rules create $VPC_FIREWALL_INGRESS_NAME \
+ --direction=INGRESS \
+ --priority=1000 \
+ --network=$VPC_NETWORK_NAME \
+ --action=ALLOW \
+ --rules=tcp:22,tcp:80,tcp:443,tcp:1522,tcp:3389 \
+ --source-ranges=0.0.0.0/0 \
+ --description="Allow SSH, HTTP, HTTPS, Autonomous DB, and RDP access"
+ --target-tags=bastion
+
+gcloud compute firewall-rules create $VPC_FIREWALL_EGRESS_NAME \
+ --direction=EGRESS \
+ --priority=1000 \
+ --network=$VPC_NETWORK_NAME \
+ --action=ALLOW \
+ --rules=tcp:22,tcp:80,tcp:443,tcp:1522,tcp:3389 \
+ --destination-ranges=0.0.0.0/0 \
+ --target-tags=bastion
diff --git a/multicloud/gcloud-cli/delete-adb.sh b/multicloud/gcloud-cli/delete-adb.sh
new file mode 100755
index 0000000..97631fb
--- /dev/null
+++ b/multicloud/gcloud-cli/delete-adb.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+# ensure you update the config file to match your deployment prior to running the deployment
+
+source ./config
+
+# Ask for confirmation
+echo "Are you sure you want to delete database '$ADB_NAME' in region '$REGION'?"
+echo "Enter (y/n):"
+read confirmation
+
+if [[ $confirmation == [yY] || $confirmation == [yY][eE][sS] ]]; then
+ echo "Deleting Autonomous Database"
+ gcloud oracle-database autonomous-databases delete $ADB_NAME --location=$REGION --quiet
+
+else
+ echo "Deletion cancelled. The database was not deleted."
+fi
\ No newline at end of file
diff --git a/multicloud/gcloud-cli/delete-all-resources.sh b/multicloud/gcloud-cli/delete-all-resources.sh
new file mode 100755
index 0000000..ae9a89a
--- /dev/null
+++ b/multicloud/gcloud-cli/delete-all-resources.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+# ensure you update the config file to match your deployment prior to running the deployment
+
+source ./config
+
+# Ask for confirmation
+echo ""
+echo "Are you sure you want to delete the sample resources?"
+echo "- ADB: $ADB_NAME"
+echo "- Bucket: $BUCKET_NAME"
+echo "- VM: $VM_NAME"
+echo ""
+echo "Enter (y/n)"
+read confirmation
+
+if [[ $confirmation == [yY] || $confirmation == [yY][eE][sS] ]]; then
+ echo "Deleting Autonomous Database"
+ gcloud oracle-database autonomous-databases delete $ADB_NAME --location=$REGION --quiet
+
+ echo "Deleting storage bucket $BUCKET_NAME"
+ gcloud storage rm -r gs://$BUCKET_NAME/*
+ gcloud storage buckets delete gs://$BUCKET_NAME
+
+ echo "Deleting VM"
+ gcloud compute instances delete $VM_NAME --zone $REGION-a
+
+ echo "The network has not been deleted. You can do that using the console.".
+else
+ echo "Deletion cancelled."
+fi
\ No newline at end of file
diff --git a/multicloud/gcloud-cli/delete-compute_vm.sh b/multicloud/gcloud-cli/delete-compute_vm.sh
new file mode 100755
index 0000000..014af87
--- /dev/null
+++ b/multicloud/gcloud-cli/delete-compute_vm.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+# Delete the compute VM defined in the config file
+source ./config
+echo ""
+echo "Deleting compute instance '$VM_NAME'"
+gcloud compute instances delete $VM_NAME --zone $REGION-a
diff --git a/multicloud/gcloud-cli/delete-data-lake-storage.sh b/multicloud/gcloud-cli/delete-data-lake-storage.sh
new file mode 100755
index 0000000..47ec458
--- /dev/null
+++ b/multicloud/gcloud-cli/delete-data-lake-storage.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+source ./config
+
+echo ""
+echo "##"
+echo "# Delete data lake storage account"
+echo "##"
+echo ""
+
+# Ask for confirmation
+echo ""
+echo "Are you sure you want to delete the the bucket '$BUCKET_NAME'? All files will be deleted!"
+echo "Enter (y/n):"
+read confirmation
+
+if [[ $confirmation == [yY] || $confirmation == [yY][eE][sS] ]]; then
+ # ensure you update the config file to match your deployment prior to running the deployment
+ source config
+ gcloud storage rm -r gs://$BUCKET_NAME/*
+ gcloud storage buckets delete gs://$BUCKET_NAME
+fi
diff --git a/multicloud/gcloud-cli/delete-network.sh b/multicloud/gcloud-cli/delete-network.sh
new file mode 100755
index 0000000..e0c9600
--- /dev/null
+++ b/multicloud/gcloud-cli/delete-network.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+echo ""
+echo "##"
+echo "# Delete VPC network $VPC_NETWORK_NAME"
+echo "##"
+echo ""
+
+# ensure you update the config file to match your deployment prior to running the deployment
+source config
+
+# Ask for confirmation
+echo "Are you sure you want to delete the VPC Network '$VPC_NETWORK_NAME'?"
+echo "Enter (y/n):"
+read confirmation
+
+if [[ $confirmation == [yY] || $confirmation == [yY][eE][sS] ]]; then
+ # Deleting firewall rules
+ echo "Deleting firewall rules for $VPC_NETWORK_NAME"
+ gcloud compute firewall-rules delete $VPC_FIREWALL_INGRESS_NAME --quiet
+ gcloud compute firewall-rules delete $VPC_FIREWALL_EGRESS_NAME --quiet
+
+ # Delete the subnet
+ echo "Deleting client subnet $SUBNET_CLIENT_NAME in $VPC_NETWORK_NAME"
+  gcloud compute networks subnets delete $SUBNET_CLIENT_NAME --region=$REGION --quiet
+
+ # Deleting the network
+ echo "Deleting network $VPC_NETWORK_NAME"
+ gcloud compute networks delete $VPC_NETWORK_NAME --quiet
+
+ if [ $? -eq 0 ]; then
+ echo "Resource group '$VPC_NETWORK_NAME' has been successfully deleted."
+ fi
+else
+ echo "Deletion cancelled."
+fi
+
+
diff --git a/multicloud/gcloud-cli/show-adb-info.sh b/multicloud/gcloud-cli/show-adb-info.sh
new file mode 100755
index 0000000..3362390
--- /dev/null
+++ b/multicloud/gcloud-cli/show-adb-info.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+echo ""
+echo "##"
+echo "# show autonomous database info"
+echo "##"
+echo ""
+
+# ensure you update the config file to match your deployment prior to running the deployment
+source ./config
+echo ""
+gcloud oracle-database autonomous-databases describe $ADB_NAME --location=$REGION --format="table(database,properties.dbWorkload, properties.state, properties.computeCount,properties.dataStorageSizeGb)"
+
+echo ""
+echo "JDBC Connection string:"
+# The string containing multiple descriptions
+DESCRIPTIONS=`gcloud oracle-database autonomous-databases describe $ADB_NAME --location=$REGION --format="value(properties.connectionStrings.profiles.value)"`
+
+# Extract the "HIGH" description specifically
+CONN_STR=$(echo "$DESCRIPTIONS" | awk -F ';' '{
+ for (i=1; i<=NF; i++) {
+ if ($i ~ /_high\./) {
+ print $i
+ exit
+ }
+ }
+}')
+
+
+#CONN_STR=${CONN_STR#?} # Remove first character
+#CONN_STR=${CONN_STR%?} # Remove last character
+CONN_STR=jdbc:oracle:thin:@$CONN_STR
+echo $CONN_STR
+echo ""
diff --git a/multicloud/gcloud-cli/show-data-lake-storage-info.sh b/multicloud/gcloud-cli/show-data-lake-storage-info.sh
new file mode 100755
index 0000000..6eff2be
--- /dev/null
+++ b/multicloud/gcloud-cli/show-data-lake-storage-info.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+
+# ensure you update the config file to match your deployment prior to running the deployment
+
+source config
+
+echo ""
+gcloud storage ls --long --recursive gs://$BUCKET_NAME
+
+echo ""
+echo "Bucket Name: $BUCKET_NAME"
+gcloud storage hmac list
+echo "Storage URL:"
+echo "https://storage.googleapis.com/$BUCKET_NAME"
+echo ""
+
diff --git a/multicloud/images/gcloud-deployment.png b/multicloud/images/gcloud-deployment.png
new file mode 100644
index 0000000..35837b7
Binary files /dev/null and b/multicloud/images/gcloud-deployment.png differ
diff --git a/multicloud/oci-cli/README.md b/multicloud/oci-cli/README.md
index 86e06cc..57c84ea 100644
--- a/multicloud/oci-cli/README.md
+++ b/multicloud/oci-cli/README.md
@@ -15,11 +15,16 @@ The steps below show how to create an Autonomous Database using the OCI CLI.
* [Use OCI Object Storage](https://docs.oracle.com/en-us/iaas/Content/Security/Reference/objectstorage_security.htm#iam-policies)
## Deploy your Autonomous Database
-Use the following scripts to deploy your Autonomous Database and sample data on OCI. Because the sample script deploys ADB on a public endpoint, the architecture is very simple:
+Autonomous Database will be deployed on a public endpoint, which keeps the architecture simple. The OCI CLI scripts will deploy:
+* An Oracle Autonomous Database, deployed with a public endpoint
+* An Object Storage bucket with sample data
+* Sample code that uses OCI GenAI (or another AI service)
+* Your own computer is used as the client.
+

-You can run the scripts independently or run `create-all-resources.sh`. Simply update the [`config`](#configuration-file) prior to running the scripts:
+You can run the OCI CLI scripts independently or run `create-all-resources.sh`. Simply update the [`config`](#configuration-file) prior to running the scripts:
|Script|Description|
|----|---|
@@ -28,6 +33,8 @@ You can run the scripts independently or run `create-all-resources.sh`. Simply u
|[create-all-resources.sh](create-all-resources.sh)|Creates your resource group, network, ADB and VM|
|[create-data-lake-storage.sh](create-data-lake-storage.sh)|Creates an OCI Object Storage bucket and uploads sample data into that bucket|
|[delete-all-resources.sh](delete-all-resources.sh)|Deletes your compartment, bucket and Autonomous Database|
+|[show-adb-info.sh](show-adb-info.sh)|Shows information about your ADB - including your JDBC connection details to the HIGH service|
+|[show-data-lake-storage-info.sh](show-data-lake-storage-info.sh)|Shows information about your data lake storage - including the storage endpoint URL|
### Configuration file
The OCI cli deployment scripts rely on settings found in the config file. These resources **will be created** by the scripts. Update the config file prior to running any of the scripts.
@@ -63,6 +70,8 @@ Connect to your Autonomous Database!
* Use these great VS Code extensions that help you develop and debug your database apps:
* SQL Developer for VS Code ([Learn More](https://www.oracle.com/database/sqldeveloper/vscode/) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.sql-developer))
* Oracle Developer Tools for VS Code ([Learn More](https://docs.oracle.com/en/database/oracle/developer-tools-for-vscode/getting-started/gettingstarted.html) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.oracledevtools))
+* [Use the sample scripts](../../sql/README.md) to learn how to use different features - like Select AI, data lake integration, JSON, and more.
+
#### JDBC Example:
JDBC is a common way to connect to Autonomous Database. For example, you can use the **Custom JDBC URL** in the VS Code SQL Developer Extension:
diff --git a/multicloud/oci-cli/config b/multicloud/oci-cli/config
index ed1d017..2f11722 100644
--- a/multicloud/oci-cli/config
+++ b/multicloud/oci-cli/config
@@ -2,9 +2,19 @@
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
# update the values below to match your requirements.
-TENANCY_OCID="ocid1.tenancy.oc1..aaaaaaaafcue47pqmrf4vigneebgbcmmoy5r7xvoypicjqqge32ewnrcyx2a"
-REGION="us-ashburn-1"
-COMPARTMENT_NAME="from-github"
+
+# your Tenancy unique identifier
+# example: ocid1.tenancy.oc1..example....
+TENANCY_OCID=""
+# Region where resources will be deployed
+# example: us-ashburn-1
+REGION=""
+
+# Compartment name. Will be created if it does not exist
+# example: mycompartment
+COMPARTMENT_NAME=""
+
+# Autonomous Database name
ADB_NAME="quickstart"
## CLOUD STORAGE
@@ -13,4 +23,5 @@ BUCKET_NAME="adb-sample"
## IDENTITIES
# The password is for the ADB ADMIN user. It must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
-USER_PASSWORD="bigdataPM2019#"
\ No newline at end of file
+# example: watchS0meMovies#
+USER_PASSWORD=""
\ No newline at end of file
diff --git a/multicloud/oci-cli/config.default b/multicloud/oci-cli/config.default
index 70f5351..2f11722 100644
--- a/multicloud/oci-cli/config.default
+++ b/multicloud/oci-cli/config.default
@@ -2,9 +2,19 @@
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
# update the values below to match your requirements.
-TENANCY_OCID="your-tenancy-ocid"
-REGION="us-ashburn-1"
-COMPARTMENT_NAME="your-target-compartment"
+
+# your Tenancy unique identifier
+# example: ocid1.tenancy.oc1..example....
+TENANCY_OCID=""
+# Region where resources will be deployed
+# example: us-ashburn-1
+REGION=""
+
+# Compartment name. Will be created if it does not exist
+# example: mycompartment
+COMPARTMENT_NAME=""
+
+# Autonomous Database name
ADB_NAME="quickstart"
## CLOUD STORAGE
@@ -13,4 +23,5 @@ BUCKET_NAME="adb-sample"
## IDENTITIES
# The password is for the ADB ADMIN user. It must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+# example: watchS0meMovies#
USER_PASSWORD=""
\ No newline at end of file
diff --git a/multicloud/oci-cli/create-compartment.sh b/multicloud/oci-cli/create-compartment.sh
index 65c0851..f0a3409 100755
--- a/multicloud/oci-cli/create-compartment.sh
+++ b/multicloud/oci-cli/create-compartment.sh
@@ -9,4 +9,11 @@ echo ""
# ensure you update the config file to match your deployment prior to running the deployment
source ./config
-oci iam compartment create --region $REGION --compartment-id $TENANCY_OCID --name "$COMPARTMENT_NAME" --description "Created by oracle-autonomous-database-samples"
\ No newline at end of file
+RESULT=$(oci iam compartment create --region $REGION --compartment-id $TENANCY_OCID --name "$COMPARTMENT_NAME" --description "Created by oracle-autonomous-database-samples" 2>&1)
+
+# Continue when the compartment was created or already exists; otherwise stop.
+if [ $? -eq 0 ]; then
+  echo "Compartment '$COMPARTMENT_NAME' created."
+elif echo "$RESULT" | grep -q "CompartmentAlreadyExists"; then
+  echo "Compartment '$COMPARTMENT_NAME' already exists. Continuing execution..."
+else
+  echo "Error creating compartment. Exiting."
+  echo "$RESULT"
+  exit 1
+fi
diff --git a/sql/config.sql b/sql/config.sql
index 2a5ba78..eeec41b 100644
--- a/sql/config.sql
+++ b/sql/config.sql
@@ -10,6 +10,7 @@ define CONN='jdbc:oracle:thin:@(description=...)'
-- user name and password used for the sample data
define USER_NAME='moviestream'
-- # The password is for the sample user. It must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+-- example: watchS0meMovies#
define USER_PASSWORD=''
--
@@ -21,9 +22,12 @@ define AI_PROVIDER='oci'
-- The Select AI profile name that encapsulates the AI provider info + tables for NL2SQL
define AI_PROFILE_NAME='genai'
-- This is a database credential that captures the secret key or other connection info
-define AI_CREDENTIAL_NAME='AI_cred'
+define AI_CREDENTIAL_NAME='AI_CRED'
--- The endpoint should be the servername only. For example, myopenai.openai.azure.com. This is not required for OCI GenAI.
+-- The endpoint should be the servername only. This is not required for OCI GenAI.
+-- Examples:
+-- myopenai.openai.azure.com
+-- us-east4-aiplatform.googleapis.com
define AI_ENDPOINT=''
-- API key for AI service. This is not required for OCI GenAI.
define AI_KEY=''
@@ -43,13 +47,21 @@ define AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME='your-openai-emedding-deployment-n
-- you can get the storage info by running /multicloud/{oci|azure|gcloud}-cli/show-data-lake-storage-info.sh
define STORAGE_PROVIDER='oci'
-- The url is a pointer to the bucket that will be used for import/export to object storage
+-- Examples:
+-- google: https://storage.googleapis.com/adb-sample
+-- azure : https://devadbstorageacct.blob.core.windows.net/adb-sample
+-- oci : https://adwc4pm.objectstorage.us-ashburn-1.oci.customer-oci.com/n/adwc4pm/b/adb-sample/o
define STORAGE_URL=''
+
-- A database credential encapsulates the authentication details to the object store. Specify a name for the credential below
define STORAGE_CREDENTIAL_NAME='storage_cred'
--- below required for azure
-define STORAGE_KEY=''
-define STORAGE_ACCOUNT_NAME=''
+-- Azure storage properties
+define AZURE_STORAGE_ACCOUNT_NAME=''
+define AZURE_STORAGE_KEY=''
+-- Google storage properties
+define GOOGLE_STORAGE_ACCESS_KEY=''
+define GOOGLE_STORAGE_SECRET=''
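+-- For Google Cloud Storage these are typically HMAC (interoperability) credentials - an access key and its secret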
--
-- OCI API credentials
diff --git a/sql/credential-create.sql b/sql/credential-create.sql
index a596acc..d4edc51 100644
--- a/sql/credential-create.sql
+++ b/sql/credential-create.sql
@@ -13,6 +13,8 @@ prompt "Creating credential: &user_param"
DECLARE
l_exists number := 0;
l_type varchar2(20) := nvl(upper('&user_param'),'ALL');
+ l_username varchar2(400);
+ l_password varchar2(400);
BEGIN
-- AI provider. Note, they will have different syntax based on the provider
if l_type in ('AI','ALL') then
@@ -73,10 +75,14 @@ BEGIN
private_key => '&OCI_PRIVATE_KEY'
);
ELSE
+ -- Google and Azure use different settings for username and password
+ l_username := CASE WHEN UPPER('&STORAGE_PROVIDER') = 'AZURE' THEN '&AZURE_STORAGE_ACCOUNT_NAME' ELSE '&GOOGLE_STORAGE_ACCESS_KEY' END;
+ l_password := CASE WHEN UPPER('&STORAGE_PROVIDER') = 'AZURE' THEN '&AZURE_STORAGE_KEY' ELSE '&GOOGLE_STORAGE_SECRET' END;
+
dbms_cloud.create_credential(
credential_name => '&STORAGE_CREDENTIAL_NAME',
- username => '&STORAGE_ACCOUNT_NAME',
- password => '&STORAGE_KEY'
+ username => l_username,
+ password => l_password
);
END IF; -- OCI vs other AI services
END IF; -- Storage
diff --git a/sql/data-export-to-datalake.sql b/sql/data-export-to-datalake.sql
index 253ea72..6227b8f 100644
--- a/sql/data-export-to-datalake.sql
+++ b/sql/data-export-to-datalake.sql
@@ -1,7 +1,7 @@
-- Copyright (c) 2024 Oracle and/or its affiliates.
-- Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
-/** Export data to Azure Data Lake **/
+/** Export data to cloud storage **/
/* PREREQUISITES
Install the sample schema using script
diff --git a/sql/graph-rag.sql b/sql/graph-rag.sql
deleted file mode 100644
index fd23a6d..0000000
--- a/sql/graph-rag.sql
+++ /dev/null
@@ -1,67 +0,0 @@
-
-
-CREATE TABLE MOVIE (
- MOVIE_ID NUMBER,
- TITLE VARCHAR2(400),
- GENRES JSON,
- SUMMARY VARCHAR2(16000)
-);
-
-CREATE TABLE MOVIE_CUSTOMER (
- CUST_ID NUMBER,
- FIRSTNAME VARCHAR(200),
- LASTNAME VARCHAR(200)
-);
-
-CREATE TABLE WATCHED_MOVIE (
- DAY_ID TIMESTAMP(6),
- MOVIE_ID NUMBER,
- PROMO_CUST_ID NUMBER
-);
-
-INSERT INTO MOVIE (MOVIE_ID, TITLE, GENRES, SUMMARY) VALUES
- (1, 'Inception', '{"Action": "Sci-Fi"}', 'A thief who steals corporate secrets through the use of dream-sharing technology is given the inverse task of planting an idea into the mind of a C.E.O.'),
- (2, 'The Matrix', '{"Action": "Sci-Fi"}', 'A computer hacker learns from mysterious rebels about the true nature of his reality and his role in the war against its controllers.'),
- (3, 'The Godfather', '{"Drama": "Crime"}', 'The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.'),
- (4, 'Titanic', '{"Romance": "Drama"}', 'A seventeen-year-old aristocrat falls in love with a kind but poor artist aboard the luxurious, ill-fated R.M.S. Titanic.'),
- (5, 'Toy Story', '{"Animation": "Adventure"}', 'A cowboy doll is profoundly threatened and jealous when a new spaceman figure supplants him as top toy in a boy''s room.');
-
-INSERT INTO MOVIE_CUSTOMER (CUST_ID, FIRSTNAME, LASTNAME) VALUES
- (101, 'John', 'Doe'),
- (102, 'Jane', 'Smith'),
- (103, 'Sam', 'Wilson'),
- (104, 'Emily', 'Clark'),
- (105, 'Michael', 'Johnson');
-
-INSERT INTO WATCHED_MOVIE (DAY_ID, MOVIE_ID, PROMO_CUST_ID) VALUES
- (TO_TIMESTAMP('2024-10-30 12:34:56', 'YYYY-MM-DD HH24:MI:SS'), 1, 101),
- (TO_TIMESTAMP('2024-10-31 12:34:56', 'YYYY-MM-DD HH24:MI:SS'), 2, 101),
- (TO_TIMESTAMP('2024-09-31 12:34:56', 'YYYY-MM-DD HH24:MI:SS'), 3, 101),
- (TO_TIMESTAMP('2024-10-31 09:15:23', 'YYYY-MM-DD HH24:MI:SS'), 2, 102),
- (TO_TIMESTAMP('2024-11-01 16:45:12', 'YYYY-MM-DD HH24:MI:SS'), 3, 103),
- (TO_TIMESTAMP('2024-11-02 18:22:43', 'YYYY-MM-DD HH24:MI:SS'), 4, 104),
- (TO_TIMESTAMP('2024-11-03 20:01:00', 'YYYY-MM-DD HH24:MI:SS'), 5, 105);
-
-CREATE PROPERTY GRAPH CUSTOMER_WATCHED_MOVIES
- VERTEX TABLES (
- MOVIES_CUSTOMER AS CUSTOMER
- KEY(CUST_ID),
- MOVIES AS MOVIE
- KEY(MOVIE_ID)
- )
- EDGE TABLES(
- WATCHED_MOVIE AS WATCHED
- KEY(DAY_ID, MOVIE_ID, PROMO_CUST_ID)
- SOURCE KEY (PROMO_CUST_ID) REFERENCES CUSTOMER(CUST_ID)
- DESTINATION KEY (MOVIE_ID) REFERENCES MOVIE(MOVIE_ID)
- );
-
- SELECT *
- FROM GRAPH_TABLE(CUSTOMER_WATCHED_MOVIES
- MATCH (c IS CUSTOMER) -[w IS WATCHED]-> (m IS MOVIE)
- COLUMNS(
- c.FIRSTNAME AS FIRSTNAME,
- c.LASTNAME AS LASTNAME,
- m.TITLE AS MOVIE_TITLE,
- w.DAY_ID as DAY_WATCHED)
- );
\ No newline at end of file
diff --git a/sql/property-graph.sql b/sql/property-graph.sql
deleted file mode 100644
index 5309eed..0000000
--- a/sql/property-graph.sql
+++ /dev/null
@@ -1,223 +0,0 @@
-CREATE TABLE MOVIES (
- MOVIE_ID NUMBER,
- TITLE VARCHAR2(400),
- GENRES JSON,
- SUMMARY VARCHAR2(16000)
-);
-
-CREATE TABLE MOVIES_CUSTOMER (
- CUST_ID NUMBER,
- FIRSTNAME VARCHAR(200),
- LASTNAME VARCHAR(200)
-);
-
-CREATE TABLE WATCHED_MOVIE (
- DAY_ID TIMESTAMP(6),
- MOVIE_ID NUMBER,
- PROMO_CUST_ID NUMBER
-);
-
-CREATE TABLE WATCHED_WITH (
- ID NUMBER,
- WP_ID NUMBER,
- WATCHER NUMBER,
- WATCHED_WITH NUMBER,
- DATE_WATCHED TIMESTAMP(6),
- MOVIE_ID NUMBER,
- WATCH_PARTY_NAME VARCHAR(200)
-);
-
-INSERT INTO MOVIES (MOVIE_ID, TITLE, GENRES, SUMMARY) VALUES
- (1, 'Inception', '{"Action": "Sci-Fi"}', 'A thief who steals corporate secrets through the use of dream-sharing technology is given the inverse task of planting an idea into the mind of a C.E.O.'),
- (2, 'The Matrix', '{"Action": "Sci-Fi"}', 'A computer hacker learns from mysterious rebels about the true nature of his reality and his role in the war against its controllers.'),
- (3, 'The Godfather', '{"Drama": "Crime"}', 'The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.'),
- (4, 'Titanic', '{"Romance": "Drama"}', 'A seventeen-year-old aristocrat falls in love with a kind but poor artist aboard the luxurious, ill-fated R.M.S. Titanic.'),
- (5, 'Toy Story', '{"Animation": "Adventure"}', 'A cowboy doll is profoundly threatened and jealous when a new spaceman figure supplants him as top toy in a boy''s room.');
-
-INSERT INTO MOVIES_CUSTOMER (CUST_ID, FIRSTNAME, LASTNAME) VALUES
- (101, 'John', 'Doe'),
- (102, 'Jane', 'Smith'),
- (103, 'Sam', 'Wilson'),
- (104, 'Emily', 'Clark'),
- (105, 'Michael', 'Johnson');
-
-INSERT INTO WATCHED_MOVIE (DAY_ID, MOVIE_ID, PROMO_CUST_ID) VALUES
- (TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 1, 101),
- (TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 1, 102),
- (TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 1, 103),
- (TO_TIMESTAMP('2024-10-31 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 2, 101),
- (TO_TIMESTAMP('2024-09-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 3, 101),
- (TO_TIMESTAMP('2024-10-31 09:15:23.654321', 'YYYY-MM-DD HH24:MI:SS.FF'), 2, 102),
- (TO_TIMESTAMP('2024-11-01 16:45:12.987654', 'YYYY-MM-DD HH24:MI:SS.FF'), 3, 103),
- (TO_TIMESTAMP('2024-11-02 18:22:43.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 4, 104),
- (TO_TIMESTAMP('2024-11-03 20:01:00.000000', 'YYYY-MM-DD HH24:MI:SS.FF'), 5, 105);
-
-INSERT INTO WATCHED_WITH (ID, WP_ID, WATCHER, WATCHED_WITH, DATE_WATCHED, MOVIE_ID, WATCH_PARTY_NAME) VALUES
- (1, 101, 101, 102, TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 1, 'New Year Party'),
- (2, 101, 101, 103, TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 1, 'New Year Party'),
- (3, 101, 102, 103, TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 1, 'New Year Party'),
- (4, 102, 101, 103, TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 3, 'Doe Watch Party'),
- (5, 102, 101, 103, TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 3, 'Doe Watch Party'),
- (6, 102, 104, 103, TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 3, 'Doe Watch Party'),
- (7, 102, 101, 104, TO_TIMESTAMP('2024-10-30 12:34:56.123456', 'YYYY-MM-DD HH24:MI:SS.FF'), 3, 'Doe Watch Party');
-
-
-BEGIN
-DBMS_CLOUD.CREATE_CREDENTIAL (
- credential_name => 'GRAPH_CREDENTIAL',
- user_ocid => 'ocid1.user.oc1..aaaaaaaan4vamdkpzj4mcayzihhbdbty27bsomnlpmshu5hcy3w6ceqfwieq',
- tenancy_ocid => 'ocid1.tenancy.oc1..aaaaaaaaj4ccqe763dizkrcdbs5x7ufvmmojd24mb6utvkymyo4xwxyv3gfa',
- private_key => 'MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCDoD9ZzA5BMbtx
-7wf/RnIKilbzhTNAtqMlbQD7s5SbxJkG8q5TfdZLFNFMa453czF6ArwmA5XEU2j1
-5ndunsXNdWJdPUXIIg2E2B1xrvTQ2fK7yxpTzCpTQEIBKcPUrUdGIfP4wssOJJ1m
-NUrR5a9CILWxj26E4OduPFLFSI05uMtXGz510u8MBJMBhcRW2OZPpAozwtJz0sOZ
-hwCKDqZYQQTmFYjqZTdsMbuCef44EU1iuFg6rSMEZI3d65AQ4+beldVVu7iOVTvy
-XoEZpedk8EPGjl9zhRMheOA5unoMfcKvGWPdqUIQXOSyUe5lPu2TlEflAkAyx55d
-FfxSvdcfAgMBAAECggEADon51WpjvLHImaokgP8AA8gKGLYC1kgWN0EWFbddV+Nc
-X9cYiGJi9EIlaEKNJwaTVX3N2IkW1uu7sUW2tYJWKP6pUdE9zwBr69uPTQpdQmCA
-nlut2cm4dx+m6sf5OJm3QLjpYPXrRfGnbS2/yPWM6UoBp85HD+hjx9pj7iS4bMKj
-LT+7CwNgicEw+J6N2omr5qeGaavyUlWQ4tSIZKhCKliBnk7CNvVkJ7z/ywugc9Ns
-Z0vlCrhduP1VpsrvzcXmEIwGSP7Z+L7bTQUkULMtxOE/LXLtDFd/I/HO32qEgbT7
-Bo0LDRvNC9+fthulJ3wRu1XpyNJm5RMtiywMIzHT0QKBgQC5eIghgjHRBVk3rEyr
-40yTKv7Z+FJcQywsT146QpxpEeTJrCnPMoZwecVkGqkQUMWTwQpYU6dUBnLyqcMl
-FVrrAoYfmvECeCjqWS11281WQCeYzdIDJ328T+wrFHh4RjFh4GmzQQVU6ZiS5iUT
-ylTtaqH+jvdMkjLJXvbz4YUypwKBgQC1rfPSxF5ChFzSsW8LU/whVs/Mtf+LbxJ6
-j2MeAqzzKng9rEZPP0r8qQmWGrUvsyx0ikslMJC88ZmG+JWHtVkbUVxQckpyfCIz
-971AFLxLZ1+rjFdqAj124MsYndXUyJ16Lg+BShsiCDlair08YcsVt7k0aPS3vTlA
-svOqb6qeyQKBgBlkzFayKbnxnoaF27WJGHnp4Bzd6ADj3Y9vino1lo64OXf3T34j
-785Ejecn00/9jx+sxYrUYUua2nApGCPiqaEVpmF7aFYrN4bmkNfbMWEGxaUhQQjX
-hlqbIr2/PsNQ8P/ypuY5F87JcO9j/V2ZTUl4WReuYWOlfLiffPZlQURvAoGAQqG9
-3vsuJu8srAlvVJRE0GVqaQYG5zihalnUXFlW3QgieVwJnV71PZ0xat/4u7nXABcI
-YGdjbiidyia5kMAuIhrA5LBGJZ7pXG3r9uij9nO/Xsdl9/dCW6suUaTxm8zIFNt3
-zE9FjEG/5zkjFlY3iYuMXXBw8EJyEQyQ2V2DEiECgYB9NzPtecCBkcFVgvSaLlAy
-hK4aQ4EFNe9tzK8v/J0UDNehJPHaapqfnc67o2dQaKdgW61R4IZTiR8B3GUB46Zm
-JZGZDfyPVx/V2lo12vm+TaPntdgcvUBmvtCN/lW7qg3pmsiGOhaJ8Dq+JUkaqDpD
-32uYReMIx+O7R6ua5rmuiA==',
-fingerprint => '5d:cf:7e:55:16:20:e1:60:f6:31:57:4d:ab:f7:35:98');
-END;
-/
-
-BEGIN
- -- drops the profile if it already exists
- DBMS_CLOUD_AI.drop_profile(profile_name => 'genai', force => true);
-
- -- Create an AI profile that uses the default LLAMA model on OCI
- dbms_cloud_ai.create_profile(
- profile_name => 'genai',
- attributes =>
- '{"provider": "oci",
- "credential_name": "GRAPH_CREDENTIAL",
- "comments":"true",
- "oci_compartment_id": "ocid1.compartment.oc1..aaaaaaaajlby3soifhkjobwner46lpjf5zvwugnzrmdfymtcdu6r2lvtti7a",
- "region":"us-chicago-1",
- "object_list": [
- {"owner": "ADMIN", "name": "MOVIES"},
- {"owner": "ADMIN", "name": "MOVIES_CUSTOMER"},
- {"owner": "ADMIN", "name": "WATCHED_MOVIE"},
- {"owner": "ADMIN", "name": "WATCHED_WITH"}
- ]
- }'
- );
- END;
-
-
-
-CREATE PROPERTY GRAPH CUSTOMER_WATCHED_MOVIES
- VERTEX TABLES (
- MOVIES_CUSTOMER AS CUSTOMER
- KEY(CUST_ID),
- MOVIES AS MOVIE
- KEY(MOVIE_ID)
- )
- EDGE TABLES(
- WATCHED_MOVIE AS WATCHED
- KEY(DAY_ID, MOVIE_ID, PROMO_CUST_ID)
- SOURCE KEY (PROMO_CUST_ID) REFERENCES CUSTOMER(CUST_ID)
- DESTINATION KEY (MOVIE_ID) REFERENCES MOVIE(MOVIE_ID),
- WATCHED_WITH
- KEY(ID)
- SOURCE KEY (WATCHER) REFERENCES CUSTOMER(CUST_ID)
- DESTINATION KEY (WATCHED_WITH) REFERENCES CUSTOMER(CUST_ID)
- );
-
--- Get the Movie title and summary for each movie a customer has watched
--- We can send this to an LLM as a Graph RAG solution to answer questions like
---- "What is the genre of {movie} based on this summary"
-SELECT DISTINCT MOVIE_TITLE, MOVIE_SUMMARY
- FROM GRAPH_TABLE( CUSTOMER_WATCHED_MOVIES
- MATCH (c1 IS CUSTOMER)-[e1 IS WATCHED]->(m IS MOVIE)
- COLUMNS (m.title as MOVIE_TITLE, m.summary as MOVIE_SUMMARY)
- );
-
-WITH prompt_document AS (
- SELECT
- JSON_OBJECT ('TASK' VALUE 'What is the genre of this movie based on this summary',
- MOVIE_TITLE,
- MOVIE_SUMMARY ) AS prompt_details
- FROM GRAPH_TABLE(CUSTOMER_WATCHED_MOVIES
- MATCH (c1 IS CUSTOMER)-[e1 IS WATCHED]->(m IS MOVIE)
- COLUMNS (m.title as MOVIE_TITLE, m.summary as MOVIE_SUMMARY)
- )
-)
-SELECT
- DBMS_LOB.SUBSTR(DBMS_CLOUD_AI.GENERATE(
- PROMPT => prompt_details,
- PROFILE_NAME => 'genai',
- ACTION => 'chat'
- ), 4000, 1) AS Answer
-FROM prompt_document;
-
--- Find customers who are connected through 2/3 watch party connections
-SELECT DISTINCT *
- FROM GRAPH_TABLE( CUSTOMER_WATCHED_MOVIES
- MATCH (c1 IS CUSTOMER)-[e1 IS WATCHED_WITH]-{2,3}(c2 IS CUSTOMER)
- COLUMNS (c1.CUST_ID as c1_cust_id, c1.FIRSTNAME as c1_fist_name, c2.cust_id as c2_cust_id, c2.FIRSTNAME as c2_fist_name)
- );
-
--- Find customers who have watched the same movie and are connected in 2/3 hops, excluding results where customer 1 and 2 are the same
--- This can be sent to an LLM as part of a Graph RAG solution to answer questions like
--- "Based on this dataset of movies customers who are connected through friends of friends have watched, containing customer IDs, names, movie titles, genres and summaries. How would you describe the movie watching preferences of these users?"
-SELECT DISTINCT *
- FROM GRAPH_TABLE(CUSTOMER_WATCHED_MOVIES
- MATCH (c1 IS CUSTOMER)-[e1 IS WATCHED_WITH]-{2,3}(c2 IS CUSTOMER) -[e2 is WATCHED]-> (m is MOVIE),
- (c1) -[e3 is WATCHED]-> (m is MOVIE)
- WHERE c1.cust_id <> c2.cust_id
- COLUMNS (c1.CUST_ID as c1_cust_id, c1.FIRSTNAME as c1_fist_name, c2.cust_id as c2_cust_id, c2.FIRSTNAME as c2_fist_name, m.title as movie_title, m.genres as movie_genre, m.summary as movie_summary)
- );
-
-
--- Create the JSON object with task and result set
-WITH prompt_document AS (
- SELECT
- JSON_OBJECT(
- 'TASK' VALUE 'Based on this dataset of movies customers who are connected through friends of friends have watched, containing customer IDs, names, movie titles, genres and summaries. How would you describe the movie watching preferences of these users?',
- 'PROMPT_DETAILS' VALUE JSON_ARRAYAGG(
- JSON_OBJECT(
- 'c1_cust_id' VALUE c1_cust_id,
- 'c1_fist_name' VALUE c1_fist_name,
- 'c2_cust_id' VALUE c2_cust_id,
- 'movie_title' VALUE movie_title,
- 'movie_genre' VALUE movie_genre,
- 'movie_summary' VALUE movie_summary
- ) RETURNING CLOB
- ) RETURNING CLOB
- ) AS prompt_details
- FROM GRAPH_TABLE(
- CUSTOMER_WATCHED_MOVIES
- MATCH (c1 IS CUSTOMER)-[e1 IS WATCHED_WITH]-{2,3}(c2 IS CUSTOMER) -[e2 is WATCHED]-> (m is MOVIE),
- (c1) -[e3 is WATCHED]-> (m is MOVIE)
- WHERE c1.cust_id <> c2.cust_id
- COLUMNS (c1.CUST_ID as c1_cust_id, c1.FIRSTNAME as c1_fist_name, c2.cust_id as c2_cust_id, c2.FIRSTNAME as c2_fist_name, m.title as movie_title, m.genres as movie_genre, m.summary as movie_summary)
- )
-)
-SELECT
- DBMS_LOB.SUBSTR(
- DBMS_CLOUD_AI.GENERATE(
- PROMPT => prompt_details,
- PROFILE_NAME => 'genai',
- ACTION => 'chat'
- ),
- 4000,
- 1
- ) AS Answer
-FROM prompt_document;