
Commit 302de5e

Merge pull request #1648 from oracle-devrel/bpeulen_change
token and password changes
2 parents fdd26a1 + 162e7b7 commit 302de5e

3 files changed: +27 −27 lines changed


data-platform/open-source-data-platforms/oci-data-flow/code-examples/connect-to-Snowflake/files/SparkSnowflake.ipynb

Lines changed: 9 additions & 9 deletions
@@ -84,8 +84,8 @@
 " .enableHiveSupport() \\\n",
 " .getOrCreate()\n",
 "\n",
-"namespace = 'fro8fl9kuqli'\n",
-"bucket = 'snowflake'\n",
+"namespace = ''\n",
+"bucket = ''\n",
 "folder = 'electrodata'\n",
 "\n",
 "output_path='oci://'+bucket+'@'+namespace+'/'+folder\n",
@@ -131,7 +131,7 @@
 " .option(\"dbtable\", \"tabLE\")\n",
 " .option(\"sfUrl\", \"XXXXXXX.snowflakecomputing.com\")\n",
 " .option(\"sfUser\", \"TEST\")\n",
-" .option(\"sfPassword\", \"PASS\")\n",
+" .option(\"sfPassword\", \"[ADD PASSWORD]]\")\n",
 " .option(\"sfDatabase\", \"TEST\")\n",
 " .option(\"sfSchema\", \"TEST\")\n",
 " .option(\"sfWarehouse\", \"COMPUTE_WH\")\n",
@@ -543,11 +543,11 @@
 "outputs": [],
 "source": [
 "import requests\n",
-"TOKEN = \"6671512971:AAEjIUEFxAcuK5pCl0EinBm8MDQ-s0csDl8\"\n",
+"TOKEN = \"[ADD TOKEN HERE]\"\n",
 "# url = f\"https://api.telegram.org/bot{TOKEN}/getUpdates\"\n",
 "# print(requests.get(url).json())\n",
 "\n",
-"chat_id = \"844904100\"\n",
+"chat_id = \"[ADD CHAT ID HERE]\"\n",
 "message = \"OCI Python can send a message to your telegram chat!\"\n",
 "url = f\"https://api.telegram.org/bot{TOKEN}/sendMessage?chat_id={chat_id}&text={message}\"\n",
 "print(requests.get(url).json()) # this sends the message"
@@ -574,8 +574,8 @@
 " return response.json()\n",
 "\n",
 "# Example usage:\n",
-"bot_token = '6671512971:AAEjIUEFxAcuK5pCl0EinBm8MDQ-s0csDl8'\n",
-"chat_id = '844904100'\n",
+"bot_token = ''\n",
+"chat_id = ''\n",
 "message = 'Hello message nr 5!'\n",
 "\n",
 "a = send_telegram_message(bot_token, chat_id, message)\n",
@@ -592,10 +592,10 @@
 "source": [
 "ds = spark.read.format(\"oracle\") \\\n",
 " .option(\"walletUri\",\"oci://dataflow_app@\"+namespace+\"/Adw_Forza_wallet.zip\") \\\n",
-" .option(\"connectionId\",\"db201909271450_high\") \\\n",
+" .option(\"connectionId\",\"\") \\\n",
 " .option(\"query\", \"select * from car.kafka_stream_dv\") \\\n",
 " .option(\"user\", \"CAR\")\\\n",
-" .option(\"password\", \"WelcomeBack123#\")\\\n",
+" .option(\"password\", \"\")\\\n",
 " .load()\n",
 "\n"
 ]
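
Note: the hunks above replace a hardcoded Snowflake password, a Telegram bot token and chat id, and an ADW connection id and password with empty placeholders. A minimal sketch of how those placeholders could be filled at run time from environment variables instead of notebook source follows; the variable names (SNOWFLAKE_PASSWORD, TELEGRAM_BOT_TOKEN, TELEGRAM_CHAT_ID) and the SparkSession setup are assumptions for illustration, not part of the commit.

# Sketch only: secrets come from the environment rather than the notebook.
# SNOWFLAKE_PASSWORD, TELEGRAM_BOT_TOKEN and TELEGRAM_CHAT_ID are assumed names.
import os
import requests
from pyspark.sql import SparkSession

spark = SparkSession.builder.enableHiveSupport().getOrCreate()

# Snowflake read, reusing the option names shown in the diff above. The format
# string is the Snowflake Spark connector's source name; the notebook's own
# format() call sits outside the visible hunk.
df = (spark.read.format("net.snowflake.spark.snowflake")
      .option("sfUrl", "XXXXXXX.snowflakecomputing.com")
      .option("sfUser", "TEST")
      .option("sfPassword", os.environ["SNOWFLAKE_PASSWORD"])
      .option("sfDatabase", "TEST")
      .option("sfSchema", "TEST")
      .option("sfWarehouse", "COMPUTE_WH")
      .option("dbtable", "tabLE")
      .load())

# Telegram notification, mirroring the cell that previously embedded the token.
token = os.environ["TELEGRAM_BOT_TOKEN"]
chat_id = os.environ["TELEGRAM_CHAT_ID"]
message = "OCI Python can send a message to your telegram chat!"
url = f"https://api.telegram.org/bot{token}/sendMessage?chat_id={chat_id}&text={message}"
print(requests.get(url).json())  # sends the message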

data-platform/open-source-data-platforms/oci-data-flow/code-examples/connect-to-adw/files/ADW_Spark-s3-AWS.ipynb

Lines changed: 12 additions & 12 deletions
@@ -64,7 +64,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 1,
+"execution_count": null,
 "id": "11866f83-260e-4b5c-a38f-5dd41bf8b6a9",
 "metadata": {
 "tags": []
@@ -77,7 +77,7 @@
 "\n",
 "compartment_id = os.environ.get(\"NB_SESSION_COMPARTMENT_OCID\")\n",
 "logs_bucket_uri = \"oci://dataflow_app@frqap2zhtzbe/log_pystudio\"\n",
-"#metastore_id = \"ocid1.datacatalogmetastore.oc1.eu-frankfurt-1.amaaaaaangencdyadlqoeypyt3hks3g5j34axfyfl3rof5ug2z7vokyury3a\"\n",
+"#metastore_id = \"\"\n",
 "\n",
 "def prepare_command(command: dict) -> str:\n",
 " \"\"\"Converts dictionary command to the string formatted commands.\"\"\"\n",
@@ -100,7 +100,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 6,
+"execution_count": null,
 "id": "c6fdfe55-5ec2-4d56-9cd2-9f4cca5cb980",
 "metadata": {
 "tags": []
@@ -190,8 +190,8 @@
 " \"spark.oracle.datasource.enabled\":\"true\",\n",
 " \"spark.jars.packages\": \"org.apache.hadoop:hadoop-aws:3.2.1,com.amazonaws:aws-java-sdk-s3:1.11.655,com.amazonaws:aws-java-sdk-core:1.11.655,org.apache.spark:spark-hadoop-cloud_2.12:3.2.1,org.postgresql:postgresql:42.7.1\",\n",
 " \"spark.hadoop.fs.s3a.impl\": \"org.apache.hadoop.fs.s3a.S3AFileSystem\",\n",
-" \"spark.hadoop.fs.s3a.access.key\":\"AKIAXWR7W6DM7ZX22DVB\",\n",
-" \"spark.hadoop.fs.s3a.secret.key\":\"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\",\n",
+" \"spark.hadoop.fs.s3a.access.key\":\"access key\",\n",
+" \"spark.hadoop.fs.s3a.secret.key\":\"scecret key\",\n",
 " },\n",
 " }\n",
 ")\n",
@@ -453,7 +453,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 16,
+"execution_count": null,
 "id": "dcd2cc52-eaff-46e1-9790-efc6e8d74a9e",
 "metadata": {
 "tags": []
@@ -602,8 +602,8 @@
 "source": [
 "%%spark\n",
 "\n",
-"spark.conf.set(\"spark.hadoop.fs.s3a.access.key\", '2c46ed9ff31e07c806cd783f150eb85536c7169c')\n",
-"spark.conf.set('spark.hadoop.fs.s3a.secret.key', 'AJI9Bg3wf57bM7Nbfnka4RwPpvCew71wQ3oHI3O+2AY=')\n",
+"spark.conf.set(\"spark.hadoop.fs.s3a.access.key\", '')\n",
+"spark.conf.set('spark.hadoop.fs.s3a.secret.key', '')\n",
 " \n",
 " \n",
 "# read some json from s3\n",
@@ -651,7 +651,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 7,
+"execution_count": null,
 "id": "e2eb241b-73bb-468b-a1e0-434006f0318d",
 "metadata": {
 "tags": []
@@ -689,10 +689,10 @@
 "\"\"\"\n",
 "\n",
 "ds = spark.read.format(\"oracle\") \\\n",
-" .option(\"adbId\",\"ocid1.autonomousdatabase.oc1.eu-frankfurt-1.antheljsngencdyase2z3keeufsr4r5wtnskyurvhbd4txb63nb64qu5tvlq\") \\\n",
+" .option(\"adbId\",\"ocid1.autonomousdatabase.oc1.eu-frankfurt-1.\") \\\n",
 " .option(\"dbtable\", \"testdataflow\") \\\n",
-" .option(\"user\", \"admin\")\\\n",
-" .option(\"password\", \"WelcomeBack123#2xxe2\")\\\n",
+" .option(\"user\", \"user\")\\\n",
+" .option(\"password\", \"password\")\\\n",
 " .option(\"sessionInitStatement\", plsql_block) \\\n",
 " .load()\n",
 "\n",

data-platform/open-source-data-platforms/oci-data-flow/code-examples/connect-to-postgresql/files/pyspark-oci-postgresql.ipynb

Lines changed: 6 additions & 6 deletions
@@ -669,7 +669,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 35,
+"execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -679,15 +679,15 @@
 " url=\"jdbc:postgresql://129.153.75.145:5432/amer\",\n",
 " driver='org.postgresql.Driver',\n",
 " dbtable=result_table_name,\n",
-" user='bisample',\n",
-" password='WelcomeBack123#',\n",
+" user='user',\n",
+" password='password',\n",
 " ssl=True,\n",
 " sslmode='require').mode('append').save()"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 42,
+"execution_count": null,
 "metadata": {},
 "outputs": [
 {
@@ -702,8 +702,8 @@
 "dn.write.format(\"jdbc\")\\\n",
 " .option(\"url\", \"jdbc:postgresql://129.153.75.145:5432/amer\")\\\n",
 " .option(\"dbtable\", \"public.agg_prod_revenue\")\\\n",
-" .option(\"user\", \"bisample\")\\\n",
-" .option(\"password\", \"WelcomeBack123#\")\\\n",
+" .option(\"user\", \"user\")\\\n",
+" .option(\"password\", \"password\")\\\n",
 " .option(\"driver\", \"org.postgresql.Driver\")\\\n",
 " .option(\"ssl\", True)\\\n",
 " .option(\"sslmode\", \"require\")\\\n",

0 commit comments
