
Commit 42c7f96

+ GeneratesRandomData.ipynb
1 parent 9aa9384 commit 42c7f96

File tree

1 file changed: 1 addition, 0 deletions

GeneratesRandomData.ipynb: 1 addition & 0 deletions
@@ -0,0 +1 @@
The added line is the complete notebook JSON. It defines a single PySpark code cell (Synapse PySpark kernel); formatted for readability, the cell source is:

# Generates a dummy dataset and writes it as a Delta table under Files/random_data

# Import necessary libraries
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StructField, IntegerType, StringType
import random
from datetime import datetime, timedelta

# Initialize the Spark session (if not already initialized)
spark = SparkSession.builder.appName("GenerateRandomData").getOrCreate()

# Function to generate random user records
def generate_random_data(num_entries):
    data = []
    for i in range(1, num_entries + 1):
        name = f"User{i}"
        entry = {
            "id": i,
            "name": name,
            "age": random.randint(18, 65),
            "email": f"{name.lower()}@example.com",
            "created_at": (datetime.now() - timedelta(days=random.randint(0, 365))).strftime("%Y-%m-%d %H:%M:%S")
        }
        data.append(entry)
    return data

# Generate 10 random entries
random_data = generate_random_data(10)

# Define the schema for the DataFrame
schema = StructType([
    StructField("id", IntegerType(), True),
    StructField("name", StringType(), True),
    StructField("age", IntegerType(), True),
    StructField("email", StringType(), True),
    StructField("created_at", StringType(), True)
])

# Create a DataFrame from the random data
df_random_data = spark.createDataFrame(random_data, schema=schema)

# Write the DataFrame to the Lakehouse as a Delta table at the specified path
output_path = "abfss://{WORKSPACE-NAME}@onelake.dfs.fabric.microsoft.com/raw_Bronze.Lakehouse/Files/random_data"  # Replace {WORKSPACE-NAME}
df_random_data.write.format("delta").mode("overwrite").save(output_path)

print(f"Random data has been saved to the Lakehouse at '{output_path}'.")
